| Column | Type | Length / values |
|---|---|---|
| sha | null | |
| last_modified | null | |
| library_name | stringclasses | 154 values |
| text | stringlengths | 1–900k |
| metadata | stringlengths | 2–348k |
| pipeline_tag | stringclasses | 45 values |
| id | stringlengths | 5–122 |
| tags | listlengths | 1–1.84k |
| created_at | stringlengths | 25–25 |
| arxiv | listlengths | 0–201 |
| languages | listlengths | 0–1.83k |
| tags_str | stringlengths | 17–9.34k |
| text_str | stringlengths | 0–389k |
| text_lists | listlengths | 0–722 |
| processed_texts | listlengths | 1–723 |
| tokens_length | listlengths | 1–723 |
| input_texts | listlengths | 1–61 |
| embeddings | listlengths | 768–768 |
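A minimal sketch of how rows with this schema might be inspected with the `datasets` library; the dataset identifier used here is a placeholder, not the real Hub id.

```python
from datasets import load_dataset

# "user/model-cards-with-embeddings" is a placeholder id; substitute the actual dataset.
ds = load_dataset("user/model-cards-with-embeddings", split="train")

row = ds[0]
print(row["id"], row["pipeline_tag"])  # model id and task
print(len(row["embeddings"]))          # 768-dimensional embedding of the card text
print(row["text_str"][:200])           # cleaned model-card text
```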
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import argparse

def parse_transcription(wav_file):
    # load pretrained model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-english-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-english-stt")
    # load audio (wav2vec2 checkpoints expect 16 kHz input)
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return pt tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # INFERENCE
    # retrieve logits & take argmax
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # transcribe
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription
```
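The snippet above forwards the file's native sample rate to the processor, but wav2vec2 checkpoints are trained on 16 kHz audio. A minimal resampling sketch, assuming `torchaudio` is available (it is not mentioned in the original card):

```python
import torch
import torchaudio
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-english-stt")
model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-english-stt")


def transcribe_16khz(wav_file: str) -> str:
    # torchaudio returns a (channels, samples) tensor plus the file's sample rate
    speech, sample_rate = torchaudio.load(wav_file)
    if sample_rate != 16_000:
        speech = torchaudio.functional.resample(speech, sample_rate, 16_000)
    # collapse to mono and feed the processor at the rate the model expects
    input_values = processor(speech.mean(dim=0), sampling_rate=16_000, return_tensors="pt").input_values
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    return processor.decode(predicted_ids[0], skip_special_tokens=True)
```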
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-english-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import argparse

def parse_transcription(wav_file):
    # load pretrained model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-gujarati-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-gujarati-stt")
    # load audio (wav2vec2 checkpoints expect 16 kHz input)
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return pt tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # INFERENCE
    # retrieve logits & take argmax
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # transcribe
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription
```
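The `import argparse` above is never used; a hedged sketch of the command-line wrapper it presumably anticipates (the argument names are illustrative, not part of the original card):

```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-gujarati-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-gujarati-stt")
    audio_input, sample_rate = sf.read(wav_file)
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    return processor.decode(predicted_ids[0], skip_special_tokens=True)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file with wav2vec2-gujarati-stt")
    parser.add_argument("wav_file", help="path to a 16 kHz mono WAV file")
    args = parser.parse_args()
    print(parse_transcription(args.wav_file))
```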
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-gujarati-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
41,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.03186299279332161,
-0.004200390540063381,
-0.004327940288931131,
-0.0486639104783535,
0.08296903967857361,
-0.01482841931283474,
0.09188038110733032,
0.0731409564614296,
0.10195652395486832,
-0.01986560970544815,
0.11330433189868927,
0.15528587996959686,
-0.03331088274717331,
0.03081902675330639,
-0.017118386924266815,
-0.2904382646083832,
0.06393804401159286,
-0.0017997336108237505,
0.05552246421575546,
0.11145319044589996,
0.10314425826072693,
-0.09685413539409637,
0.03575778380036354,
0.07701276242733002,
-0.0914183184504509,
0.04680001735687256,
-0.0029669166542589664,
-0.12749430537223816,
0.11869028955698013,
0.05191557854413986,
0.11529816687107086,
0.011094547808170319,
0.026011688634753227,
-0.20258396863937378,
0.00643300823867321,
-0.02760254219174385,
-0.015765393152832985,
0.0018986233044415712,
0.03428981080651283,
-0.09199697524309158,
0.13884153962135315,
0.053002260625362396,
0.0033086107578128576,
0.017443235963582993,
-0.07980665564537048,
-0.12262566387653351,
0.08424817025661469,
-0.0017996759852394462,
0.027105284854769707,
0.10755113512277603,
-0.0620075948536396,
0.14594286680221558,
-0.11785798519849777,
0.06648331880569458,
0.0752410739660263,
-0.29929932951927185,
0.02066616155207157,
0.12076394259929657,
0.1212797686457634,
0.028650730848312378,
-0.03099503181874752,
0.11366453766822815,
-0.013605164363980293,
0.04599415883421898,
0.006525254342705011,
-0.041668154299259186,
-0.14168903231620789,
0.05691952630877495,
-0.13300234079360962,
-0.04457761347293854,
0.1750788688659668,
-0.055770061910152435,
0.057813454419374466,
-0.1281365007162094,
-0.08233718574047089,
0.016624772921204567,
-0.05392378568649292,
-0.06286374479532242,
-0.01210800837725401,
0.08046729117631912,
0.005854764487594366,
-0.09527720510959625,
-0.11920042335987091,
-0.05765800178050995,
-0.15215566754341125,
0.26112106442451477,
0.0013486547395586967,
0.08475995808839798,
-0.18662519752979279,
0.04474426433444023,
-0.07748544961214066,
-0.0493711456656456,
0.010454943403601646,
-0.09063330292701721,
-0.004149819258600473,
0.05147605389356613,
-0.13550545275211334,
0.0006644324166700244,
0.06711526960134506,
-0.04005381837487221,
0.049110475927591324,
0.02506011724472046,
0.008123738691210747,
0.09447431564331055,
0.05308625474572182,
0.13312822580337524,
-0.1151767373085022,
-0.02024880237877369,
-0.031005462631583214,
-0.08725766837596893,
-0.013956311158835888,
-0.04309866577386856,
-0.1530262976884842,
-0.09433525055646896,
-0.026319850236177444,
0.053456734865903854,
-0.0022562723606824875,
0.056485362350940704,
0.009100757539272308,
-0.04809367656707764,
-0.014979030936956406,
-0.06992588192224503,
-0.0044015999883413315,
0.07363616675138474,
0.03219737485051155,
0.13770169019699097,
0.049049120396375656,
0.026546694338321686,
-0.07486826926469803,
-0.08767493814229965,
-0.017501184716820717,
0.049232978373765945,
-0.018362952396273613,
-0.10748149454593658,
-0.04447443410754204,
-0.0504486970603466,
0.04093729332089424,
-0.17446352541446686,
-0.019713927060365677,
-0.012745914049446583,
0.02187197469174862,
-0.025472726672887802,
0.013299843296408653,
-0.07812383025884628,
0.0005650148959830403,
0.03361773118376732,
-0.057588331401348114,
-0.0005850289016962051,
-0.04611543193459511,
0.0406295545399189,
-0.01571734808385372,
0.11426667869091034,
-0.12685365974903107,
0.08685997873544693,
-0.07004048675298691,
-0.03767699748277664,
-0.018257353454828262,
0.08876976370811462,
-0.026859046891331673,
0.023612478747963905,
-0.0668015405535698,
-0.056011736392974854,
-0.1329401582479477,
0.10101764649152756,
-0.022770002484321594,
0.13146695494651794,
-0.1781366914510727,
-0.07330234348773956,
0.17375485599040985,
-0.0765891969203949,
-0.08186094462871552,
0.07459931820631027,
0.005113431252539158,
0.07495049387216568,
0.030202113091945648,
0.2607187032699585,
0.030237937346100807,
-0.11562485992908478,
0.11961887031793594,
0.15332596004009247,
-0.08929361402988434,
-0.020350810140371323,
0.015474130399525166,
-0.040896568447351456,
-0.06548767536878586,
0.020308874547481537,
-0.03140954300761223,
0.07157962769269943,
-0.032317254692316055,
-0.08963483572006226,
-0.036205995827913284,
-0.11073638498783112,
0.09265921264886856,
-0.006225192453712225,
0.10999901592731476,
0.026855267584323883,
-0.016249407082796097,
0.03916789963841438,
0.06132557988166809,
-0.07346528768539429,
0.04525895044207573,
-0.12483206391334534,
0.08704348653554916,
-0.056279223412275314,
-0.0002356834156671539,
-0.20731103420257568,
0.10612635314464569,
-0.0329204685986042,
0.09835223108530045,
0.046723347157239914,
0.20851416885852814,
0.08706885576248169,
-0.04032886400818825,
0.04078107699751854,
0.0021282462403178215,
0.1400391161441803,
0.060296230018138885,
-0.04852994158864021,
-0.040831129997968674,
-0.01617157831788063,
-0.07386111468076706,
-0.021308621391654015,
-0.05658708140254021,
-0.020341159775853157,
-0.0017006858251988888,
0.07406903058290482,
-0.04182248190045357,
0.062068864703178406,
0.005664403550326824,
0.024661099538207054,
-0.02104867622256279,
0.03134768456220627,
0.07795621454715729,
-0.0451931357383728,
-0.08047739416360855,
0.23785850405693054,
-0.18467977643013,
0.2054787129163742,
0.22624149918556213,
-0.2767010033130646,
0.04765373468399048,
0.09380259364843369,
0.005579759832471609,
0.050762396305799484,
0.035999804735183716,
-0.00412161648273468,
0.2355819195508957,
-0.012296522036194801,
0.09885377436876297,
-0.05497748404741287,
0.02428996004164219,
0.02131466381251812,
-0.05625699460506439,
-0.04733964428305626,
0.03332201763987541,
0.04095600172877312,
-0.06289898604154587,
0.07928013801574707,
0.1789366900920868,
-0.060826804488897324,
0.1682494580745697,
0.046612534672021866,
-0.028556805104017258,
0.021601088345050812,
-0.05685633048415184,
-0.09857689589262009,
0.02368246577680111,
-0.33015525341033936,
-0.11741338670253754,
0.08700934797525406,
-0.0008049802272580564,
0.12229079008102417,
-0.09561337530612946,
-0.0006401394493877888,
0.0372639000415802,
-0.04316796734929085,
-0.062308188527822495,
0.08264674246311188,
0.014820830896496773,
0.05012909695506096,
-0.03209609165787697,
-0.12405548244714737,
0.0256104227155447,
-0.022697653621435165,
-0.09694897383451462,
0.10004496574401855,
-0.10422287881374359,
-0.239841029047966,
-0.08007767051458359,
-0.07519228756427765,
-0.009652567096054554,
0.08299211412668228,
0.07465042918920517,
-0.07841744273900986,
-0.01996261440217495,
-0.0219185259193182,
0.021210219711065292,
-0.09579060971736908,
-0.0022155973128974438,
-0.020832009613513947,
0.004588881973177195,
-0.02990828827023506,
-0.12492093443870544,
-0.041977658867836,
-0.05378863960504532,
-0.020001566037535667,
0.04504897817969322,
-0.061678532510995865,
0.037179816514253616,
0.21639688313007355,
0.024647142738103867,
0.08510708808898926,
0.01776488870382309,
0.21598969399929047,
-0.0553479939699173,
-0.08129427582025528,
0.16384631395339966,
0.0020732360426336527,
0.0021272553130984306,
0.14624497294425964,
0.04757542535662651,
-0.06477635353803635,
-0.05576341971755028,
-0.062323350459337234,
-0.1024550348520279,
-0.10130661725997925,
-0.18866626918315887,
-0.09400071203708649,
-0.11551478505134583,
0.005995338782668114,
0.02283010073006153,
0.06296941637992859,
0.0449184887111187,
0.05855361372232437,
-0.08619730919599533,
0.010989388450980186,
0.027222296223044395,
0.21244625747203827,
-0.08213528245687485,
0.15385127067565918,
-0.045660100877285004,
-0.14614064991474152,
0.02699928730726242,
0.04718083143234253,
0.1009853258728981,
0.15217342972755432,
-0.013332042843103409,
0.05871689319610596,
0.12141083925962448,
0.14600148797035217,
0.1511966735124588,
0.008384093642234802,
-0.018715213984251022,
-0.017723066732287407,
-0.05917373299598694,
-0.03853991627693176,
0.12076987326145172,
0.23551490902900696,
-0.1625279039144516,
0.00006200006464496255,
-0.13946455717086792,
0.08088137954473495,
0.09655515849590302,
0.09075010567903519,
-0.19134820997714996,
0.03841414675116539,
0.05711402744054794,
-0.05948886275291443,
-0.07806068658828735,
0.12918806076049805,
0.0766913965344429,
-0.09090772271156311,
0.02147851139307022,
0.05853215605020523,
0.07172980159521103,
0.01708107255399227,
0.09498762339353561,
-0.1620451956987381,
-0.16137097775936127,
0.06017930433154106,
0.0859639048576355,
-0.20755580067634583,
0.25956448912620544,
-0.021552052348852158,
-0.028829634189605713,
-0.08821998536586761,
-0.038566622883081436,
0.03710592910647392,
0.14298014342784882,
0.14423507452011108,
0.008452809415757656,
-0.02585403434932232,
-0.057590190321207047,
0.002214724663645029,
0.0645398199558258,
0.11707662045955658,
-0.007839933037757874,
-0.010711874812841415,
-0.01779228448867798,
-0.0307039562612772,
0.02045309916138649,
0.04438058286905289,
-0.07279125601053238,
-0.12816175818443298,
0.0250592902302742,
0.14450125396251678,
0.0647921934723854,
0.026827633380889893,
-0.018518028780817986,
-0.1423524171113968,
0.08977345377206802,
-0.05523562431335449,
-0.026097722351551056,
-0.06682457029819489,
-0.1699259728193283,
0.15337422490119934,
-0.026866713538765907,
0.07358696311712265,
-0.00047379909665323794,
0.038425035774707794,
-0.06224082037806511,
-0.126923605799675,
0.10364693403244019,
-0.1191260814666748,
0.019326673820614815,
0.0025196722708642483,
0.11652641743421555,
-0.012214134447276592,
0.010707332752645016,
0.09665895253419876,
0.04197024181485176,
-0.14929239451885223,
-0.10257202386856079,
-0.03015541471540928,
0.13182519376277924,
-0.08205161988735199,
0.00803716853260994,
0.023610740900039673,
-0.13923189043998718,
-0.03323044627904892,
0.053449999541044235,
0.2788114845752716,
-0.002338047605007887,
-0.09274545311927795,
0.16376468539237976,
0.20548291504383087,
-0.03790527582168579,
-0.2533778250217438,
-0.16469749808311462,
-0.07474901527166367,
0.051656194031238556,
-0.02491854503750801,
-0.08451970666646957,
0.07087007164955139,
-0.10586416721343994,
-0.06272540986537933,
-0.04853689670562744,
-0.18022161722183228,
-0.11338511109352112,
0.27455079555511475,
-0.031454652547836304,
0.2984967827796936,
-0.053311578929424286,
-0.05433935299515724,
-0.024840066209435463,
-0.0933004766702652,
0.12145275622606277,
-0.08249333500862122,
0.08710568398237228,
0.02995762601494789,
0.14894017577171326,
0.07329157739877701,
-0.03350840508937836,
0.125051349401474,
0.039623767137527466,
-0.03769025206565857,
-0.028186652809381485,
-0.11694729328155518,
0.03790958598256111,
0.0075390879064798355,
0.0748821422457695,
0.020518172532320023,
0.03214528039097786,
-0.12657295167446136,
-0.07631182670593262,
-0.12119220942258835,
0.05539520084857941,
0.05062667280435562,
-0.06636221706867218,
0.04283962771296501,
-0.10488126426935196,
0.02665773220360279,
0.04102179408073425,
0.07186318188905716,
-0.15190935134887695,
0.045765191316604614,
0.2468288391828537,
0.14495931565761566,
-0.10368319600820541,
-0.030337555333971977,
0.005129113793373108,
-0.05862453579902649,
0.13051952421665192,
-0.06973199546337128,
0.03461240604519844,
0.047396764159202576,
0.003934831358492374,
0.037755731493234634,
0.08890917897224426,
-0.0019721146672964096,
0.011391771025955677,
0.0837189108133316,
-0.1068313866853714,
-0.09119442850351334,
-0.02313355728983879,
0.03751590847969055,
0.11006053537130356,
0.03051132522523403,
0.13355541229248047,
-0.03843075782060623,
-0.02349076420068741,
-0.04947882518172264,
-0.028117509558796883,
-0.1504010558128357,
0.04494497552514076,
0.07733725756406784,
0.04389898478984833,
-0.11686959862709045,
-0.024535194039344788,
-0.030698545277118683,
-0.10629171133041382,
0.007254380267113447,
-0.03915035352110863,
-0.0760270282626152,
-0.16227421164512634,
-0.12629833817481995,
0.05908539146184921,
-0.11001002043485641,
-0.09731046855449677,
0.06325901299715042,
-0.1110185980796814,
0.008821898140013218,
0.1296229064464569,
0.08721306174993515,
0.06524926424026489,
-0.14028146862983704,
-0.04252591356635094,
0.04095478355884552,
-0.03241032361984253,
-0.007516405079513788,
-0.0663459300994873,
-0.1063365563750267,
0.09696641564369202,
0.0182547215372324,
0.10888571292161942,
-0.10907489061355591,
-0.08413825184106827,
-0.09533492475748062,
0.0742587223649025,
-0.17228174209594727,
-0.03686191514134407,
-0.11839939653873444,
-0.019397787749767303,
0.052220847457647324,
-0.0750964805483818,
-0.05174725130200386,
0.00314967124722898,
-0.14319871366024017,
0.0708819106221199,
-0.013260525651276112,
0.0363905094563961,
-0.07985072582960129,
0.024278810247778893,
0.04325252026319504,
-0.029213296249508858,
0.07775893062353134,
0.19990959763526917,
-0.14924731850624084,
0.10904547572135925,
-0.12039992213249207,
-0.12007620930671692,
0.08155656605958939,
0.04603108391165733,
0.05010863393545151,
-0.009226856753230095,
0.010532091371715069,
0.07986065000295639,
0.0565689280629158,
0.008975248783826828,
0.13580963015556335,
-0.048855461180210114,
0.0591491162776947,
-0.021293548867106438,
-0.08196473121643066,
0.0034934289287775755,
-0.04101786017417908,
0.08425158262252808,
0.09296426177024841,
0.0530308373272419,
-0.02404419146478176,
0.0602082833647728,
0.009954490698873997,
0.06297457963228226,
-0.09766310453414917,
-0.058397967368364334,
0.0017010924639180303,
-0.1149732694029808,
0.02868911810219288,
-0.03445316478610039,
0.31077465415000916,
0.02406521700322628,
0.09689889848232269,
-0.012704682536423206,
0.03339372202754021,
-0.022320294752717018,
0.026361403986811638,
0.28254321217536926,
0.08619052171707153,
0.02972693182528019,
-0.08472433686256409,
0.05374154821038246,
0.019960317760705948,
0.05461052060127258,
-0.06914269179105759,
0.1477220356464386,
0.0004169055027887225,
0.161441832780838,
0.08504056185483932,
-0.025822602212429047,
-0.11634687334299088,
-0.13491134345531464,
-0.02108769491314888,
0.04009724035859108,
-0.13352912664413452,
0.04716862738132477,
0.14726927876472473,
-0.019542457535862923,
0.056865572929382324,
-0.015263020992279053,
-0.024152567610144615,
-0.16835054755210876,
-0.1113729476928711,
-0.06429774314165115,
-0.1562766283750534,
-0.019899778068065643,
-0.06868623197078705,
0.04550342634320259,
0.06901273876428604,
0.052924200892448425,
-0.009547594003379345,
0.11870699375867844,
-0.039900172501802444,
-0.0672885924577713,
0.05728130415081978,
-0.025267023593187332,
0.043157126754522324,
-0.04051828011870384,
-0.0012900256551802158,
-0.03725013881921768,
-0.0036901559215039015,
-0.004493960179388523,
0.00901018362492323,
-0.12058661878108978,
-0.018879251554608345,
-0.08181025087833405,
-0.0466916486620903,
-0.09973856061697006,
-0.007444834802299738,
-0.0023623299784958363,
0.12552790343761444,
0.04034178704023361,
-0.09013421088457108,
-0.0213156845420599,
0.19663746654987335,
-0.14585576951503754,
-0.1983063519001007,
-0.060555797070264816,
0.2386734038591385,
0.04274589940905571,
0.13851799070835114,
-0.06495395302772522,
-0.01585833728313446,
-0.1207629069685936,
0.3061027526855469,
0.30868634581565857,
-0.08475799858570099,
0.07288450002670288,
0.05102105066180229,
0.04086671397089958,
0.04316530376672745,
-0.00002310849231434986,
0.0951421856880188,
0.29498761892318726,
-0.05289041996002197,
-0.027323465794324875,
-0.06335887312889099,
-0.054028332233428955,
-0.0403280071914196,
0.05857134610414505,
-0.009126032702624798,
-0.12757092714309692,
-0.029525544494390488,
0.09027303010225296,
-0.21515251696109772,
0.11078815907239914,
-0.032654643058776855,
-0.13363830745220184,
-0.04089254140853882,
0.017599821090698242,
0.09252285957336426,
0.04992211237549782,
0.08632275462150574,
-0.06598566472530365,
-0.09164350479841232,
0.0805046558380127,
0.037150487303733826,
-0.19401578605175018,
0.03754609450697899,
0.06103724241256714,
-0.023955514654517174,
-0.010921783745288849,
0.0023282684851437807,
0.0658923014998436,
0.0385051853954792,
0.12565383315086365,
0.027140973135828972,
0.1537025421857834,
0.0058655645698308945,
-0.09518881142139435,
0.008974296040832996,
0.047800976783037186,
-0.03270869329571724,
-0.03860403597354889,
0.010653484612703323,
-0.27797478437423706,
0.09113006293773651,
-0.013058957643806934,
-0.026218395680189133,
-0.032628025859594345,
-0.020102104172110558,
-0.04513247311115265,
0.06231781095266342,
0.012721139006316662,
-0.010963412933051586,
-0.07055005431175232,
0.005820227321237326,
0.06350240856409073,
-0.006245844066143036,
-0.09239339828491211,
-0.13312841951847076,
-0.14528027176856995,
-0.08218343555927277,
-0.04653673619031906,
-0.0013480675406754017,
-0.10029658675193787,
-0.009050268679857254,
0.03277643769979477,
0.038988884538412094,
0.006692338269203901,
0.03783223778009415,
0.11002694815397263,
-0.00606278283521533,
0.0019004064379259944,
0.0003056168498005718,
0.08056733757257462,
0.10057663917541504,
-0.16002751886844635,
-0.11340176314115524
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import argparse

def parse_transcription(wav_file):
    # load pretrained model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-kannada-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-kannada-stt")
    # load audio (wav2vec2 checkpoints expect 16 kHz input)
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return pt tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # INFERENCE
    # retrieve logits & take argmax
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # transcribe
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription
```
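For several recordings at once, the processor can pad a whole batch in a single call; a minimal sketch of that pattern (batching with `padding=True` and `batch_decode` is a common usage pattern, not something stated in the original card), assuming all clips are already 16 kHz mono:

```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-kannada-stt")
model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-kannada-stt")


def transcribe_batch(wav_files):
    # read every clip; all files are assumed to be 16 kHz mono
    clips = [sf.read(f)[0] for f in wav_files]
    # pad the clips to a common length and build one batch tensor
    inputs = processor(clips, sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    return processor.batch_decode(predicted_ids, skip_special_tokens=True)
```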
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-kannada-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
41,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #has_space #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.03186299279332161,
-0.004200390540063381,
-0.004327940288931131,
-0.0486639104783535,
0.08296903967857361,
-0.01482841931283474,
0.09188038110733032,
0.0731409564614296,
0.10195652395486832,
-0.01986560970544815,
0.11330433189868927,
0.15528587996959686,
-0.03331088274717331,
0.03081902675330639,
-0.017118386924266815,
-0.2904382646083832,
0.06393804401159286,
-0.0017997336108237505,
0.05552246421575546,
0.11145319044589996,
0.10314425826072693,
-0.09685413539409637,
0.03575778380036354,
0.07701276242733002,
-0.0914183184504509,
0.04680001735687256,
-0.0029669166542589664,
-0.12749430537223816,
0.11869028955698013,
0.05191557854413986,
0.11529816687107086,
0.011094547808170319,
0.026011688634753227,
-0.20258396863937378,
0.00643300823867321,
-0.02760254219174385,
-0.015765393152832985,
0.0018986233044415712,
0.03428981080651283,
-0.09199697524309158,
0.13884153962135315,
0.053002260625362396,
0.0033086107578128576,
0.017443235963582993,
-0.07980665564537048,
-0.12262566387653351,
0.08424817025661469,
-0.0017996759852394462,
0.027105284854769707,
0.10755113512277603,
-0.0620075948536396,
0.14594286680221558,
-0.11785798519849777,
0.06648331880569458,
0.0752410739660263,
-0.29929932951927185,
0.02066616155207157,
0.12076394259929657,
0.1212797686457634,
0.028650730848312378,
-0.03099503181874752,
0.11366453766822815,
-0.013605164363980293,
0.04599415883421898,
0.006525254342705011,
-0.041668154299259186,
-0.14168903231620789,
0.05691952630877495,
-0.13300234079360962,
-0.04457761347293854,
0.1750788688659668,
-0.055770061910152435,
0.057813454419374466,
-0.1281365007162094,
-0.08233718574047089,
0.016624772921204567,
-0.05392378568649292,
-0.06286374479532242,
-0.01210800837725401,
0.08046729117631912,
0.005854764487594366,
-0.09527720510959625,
-0.11920042335987091,
-0.05765800178050995,
-0.15215566754341125,
0.26112106442451477,
0.0013486547395586967,
0.08475995808839798,
-0.18662519752979279,
0.04474426433444023,
-0.07748544961214066,
-0.0493711456656456,
0.010454943403601646,
-0.09063330292701721,
-0.004149819258600473,
0.05147605389356613,
-0.13550545275211334,
0.0006644324166700244,
0.06711526960134506,
-0.04005381837487221,
0.049110475927591324,
0.02506011724472046,
0.008123738691210747,
0.09447431564331055,
0.05308625474572182,
0.13312822580337524,
-0.1151767373085022,
-0.02024880237877369,
-0.031005462631583214,
-0.08725766837596893,
-0.013956311158835888,
-0.04309866577386856,
-0.1530262976884842,
-0.09433525055646896,
-0.026319850236177444,
0.053456734865903854,
-0.0022562723606824875,
0.056485362350940704,
0.009100757539272308,
-0.04809367656707764,
-0.014979030936956406,
-0.06992588192224503,
-0.0044015999883413315,
0.07363616675138474,
0.03219737485051155,
0.13770169019699097,
0.049049120396375656,
0.026546694338321686,
-0.07486826926469803,
-0.08767493814229965,
-0.017501184716820717,
0.049232978373765945,
-0.018362952396273613,
-0.10748149454593658,
-0.04447443410754204,
-0.0504486970603466,
0.04093729332089424,
-0.17446352541446686,
-0.019713927060365677,
-0.012745914049446583,
0.02187197469174862,
-0.025472726672887802,
0.013299843296408653,
-0.07812383025884628,
0.0005650148959830403,
0.03361773118376732,
-0.057588331401348114,
-0.0005850289016962051,
-0.04611543193459511,
0.0406295545399189,
-0.01571734808385372,
0.11426667869091034,
-0.12685365974903107,
0.08685997873544693,
-0.07004048675298691,
-0.03767699748277664,
-0.018257353454828262,
0.08876976370811462,
-0.026859046891331673,
0.023612478747963905,
-0.0668015405535698,
-0.056011736392974854,
-0.1329401582479477,
0.10101764649152756,
-0.022770002484321594,
0.13146695494651794,
-0.1781366914510727,
-0.07330234348773956,
0.17375485599040985,
-0.0765891969203949,
-0.08186094462871552,
0.07459931820631027,
0.005113431252539158,
0.07495049387216568,
0.030202113091945648,
0.2607187032699585,
0.030237937346100807,
-0.11562485992908478,
0.11961887031793594,
0.15332596004009247,
-0.08929361402988434,
-0.020350810140371323,
0.015474130399525166,
-0.040896568447351456,
-0.06548767536878586,
0.020308874547481537,
-0.03140954300761223,
0.07157962769269943,
-0.032317254692316055,
-0.08963483572006226,
-0.036205995827913284,
-0.11073638498783112,
0.09265921264886856,
-0.006225192453712225,
0.10999901592731476,
0.026855267584323883,
-0.016249407082796097,
0.03916789963841438,
0.06132557988166809,
-0.07346528768539429,
0.04525895044207573,
-0.12483206391334534,
0.08704348653554916,
-0.056279223412275314,
-0.0002356834156671539,
-0.20731103420257568,
0.10612635314464569,
-0.0329204685986042,
0.09835223108530045,
0.046723347157239914,
0.20851416885852814,
0.08706885576248169,
-0.04032886400818825,
0.04078107699751854,
0.0021282462403178215,
0.1400391161441803,
0.060296230018138885,
-0.04852994158864021,
-0.040831129997968674,
-0.01617157831788063,
-0.07386111468076706,
-0.021308621391654015,
-0.05658708140254021,
-0.020341159775853157,
-0.0017006858251988888,
0.07406903058290482,
-0.04182248190045357,
0.062068864703178406,
0.005664403550326824,
0.024661099538207054,
-0.02104867622256279,
0.03134768456220627,
0.07795621454715729,
-0.0451931357383728,
-0.08047739416360855,
0.23785850405693054,
-0.18467977643013,
0.2054787129163742,
0.22624149918556213,
-0.2767010033130646,
0.04765373468399048,
0.09380259364843369,
0.005579759832471609,
0.050762396305799484,
0.035999804735183716,
-0.00412161648273468,
0.2355819195508957,
-0.012296522036194801,
0.09885377436876297,
-0.05497748404741287,
0.02428996004164219,
0.02131466381251812,
-0.05625699460506439,
-0.04733964428305626,
0.03332201763987541,
0.04095600172877312,
-0.06289898604154587,
0.07928013801574707,
0.1789366900920868,
-0.060826804488897324,
0.1682494580745697,
0.046612534672021866,
-0.028556805104017258,
0.021601088345050812,
-0.05685633048415184,
-0.09857689589262009,
0.02368246577680111,
-0.33015525341033936,
-0.11741338670253754,
0.08700934797525406,
-0.0008049802272580564,
0.12229079008102417,
-0.09561337530612946,
-0.0006401394493877888,
0.0372639000415802,
-0.04316796734929085,
-0.062308188527822495,
0.08264674246311188,
0.014820830896496773,
0.05012909695506096,
-0.03209609165787697,
-0.12405548244714737,
0.0256104227155447,
-0.022697653621435165,
-0.09694897383451462,
0.10004496574401855,
-0.10422287881374359,
-0.239841029047966,
-0.08007767051458359,
-0.07519228756427765,
-0.009652567096054554,
0.08299211412668228,
0.07465042918920517,
-0.07841744273900986,
-0.01996261440217495,
-0.0219185259193182,
0.021210219711065292,
-0.09579060971736908,
-0.0022155973128974438,
-0.020832009613513947,
0.004588881973177195,
-0.02990828827023506,
-0.12492093443870544,
-0.041977658867836,
-0.05378863960504532,
-0.020001566037535667,
0.04504897817969322,
-0.061678532510995865,
0.037179816514253616,
0.21639688313007355,
0.024647142738103867,
0.08510708808898926,
0.01776488870382309,
0.21598969399929047,
-0.0553479939699173,
-0.08129427582025528,
0.16384631395339966,
0.0020732360426336527,
0.0021272553130984306,
0.14624497294425964,
0.04757542535662651,
-0.06477635353803635,
-0.05576341971755028,
-0.062323350459337234,
-0.1024550348520279,
-0.10130661725997925,
-0.18866626918315887,
-0.09400071203708649,
-0.11551478505134583,
0.005995338782668114,
0.02283010073006153,
0.06296941637992859,
0.0449184887111187,
0.05855361372232437,
-0.08619730919599533,
0.010989388450980186,
0.027222296223044395,
0.21244625747203827,
-0.08213528245687485,
0.15385127067565918,
-0.045660100877285004,
-0.14614064991474152,
0.02699928730726242,
0.04718083143234253,
0.1009853258728981,
0.15217342972755432,
-0.013332042843103409,
0.05871689319610596,
0.12141083925962448,
0.14600148797035217,
0.1511966735124588,
0.008384093642234802,
-0.018715213984251022,
-0.017723066732287407,
-0.05917373299598694,
-0.03853991627693176,
0.12076987326145172,
0.23551490902900696,
-0.1625279039144516,
0.00006200006464496255,
-0.13946455717086792,
0.08088137954473495,
0.09655515849590302,
0.09075010567903519,
-0.19134820997714996,
0.03841414675116539,
0.05711402744054794,
-0.05948886275291443,
-0.07806068658828735,
0.12918806076049805,
0.0766913965344429,
-0.09090772271156311,
0.02147851139307022,
0.05853215605020523,
0.07172980159521103,
0.01708107255399227,
0.09498762339353561,
-0.1620451956987381,
-0.16137097775936127,
0.06017930433154106,
0.0859639048576355,
-0.20755580067634583,
0.25956448912620544,
-0.021552052348852158,
-0.028829634189605713,
-0.08821998536586761,
-0.038566622883081436,
0.03710592910647392,
0.14298014342784882,
0.14423507452011108,
0.008452809415757656,
-0.02585403434932232,
-0.057590190321207047,
0.002214724663645029,
0.0645398199558258,
0.11707662045955658,
-0.007839933037757874,
-0.010711874812841415,
-0.01779228448867798,
-0.0307039562612772,
0.02045309916138649,
0.04438058286905289,
-0.07279125601053238,
-0.12816175818443298,
0.0250592902302742,
0.14450125396251678,
0.0647921934723854,
0.026827633380889893,
-0.018518028780817986,
-0.1423524171113968,
0.08977345377206802,
-0.05523562431335449,
-0.026097722351551056,
-0.06682457029819489,
-0.1699259728193283,
0.15337422490119934,
-0.026866713538765907,
0.07358696311712265,
-0.00047379909665323794,
0.038425035774707794,
-0.06224082037806511,
-0.126923605799675,
0.10364693403244019,
-0.1191260814666748,
0.019326673820614815,
0.0025196722708642483,
0.11652641743421555,
-0.012214134447276592,
0.010707332752645016,
0.09665895253419876,
0.04197024181485176,
-0.14929239451885223,
-0.10257202386856079,
-0.03015541471540928,
0.13182519376277924,
-0.08205161988735199,
0.00803716853260994,
0.023610740900039673,
-0.13923189043998718,
-0.03323044627904892,
0.053449999541044235,
0.2788114845752716,
-0.002338047605007887,
-0.09274545311927795,
0.16376468539237976,
0.20548291504383087,
-0.03790527582168579,
-0.2533778250217438,
-0.16469749808311462,
-0.07474901527166367,
0.051656194031238556,
-0.02491854503750801,
-0.08451970666646957,
0.07087007164955139,
-0.10586416721343994,
-0.06272540986537933,
-0.04853689670562744,
-0.18022161722183228,
-0.11338511109352112,
0.27455079555511475,
-0.031454652547836304,
0.2984967827796936,
-0.053311578929424286,
-0.05433935299515724,
-0.024840066209435463,
-0.0933004766702652,
0.12145275622606277,
-0.08249333500862122,
0.08710568398237228,
0.02995762601494789,
0.14894017577171326,
0.07329157739877701,
-0.03350840508937836,
0.125051349401474,
0.039623767137527466,
-0.03769025206565857,
-0.028186652809381485,
-0.11694729328155518,
0.03790958598256111,
0.0075390879064798355,
0.0748821422457695,
0.020518172532320023,
0.03214528039097786,
-0.12657295167446136,
-0.07631182670593262,
-0.12119220942258835,
0.05539520084857941,
0.05062667280435562,
-0.06636221706867218,
0.04283962771296501,
-0.10488126426935196,
0.02665773220360279,
0.04102179408073425,
0.07186318188905716,
-0.15190935134887695,
0.045765191316604614,
0.2468288391828537,
0.14495931565761566,
-0.10368319600820541,
-0.030337555333971977,
0.005129113793373108,
-0.05862453579902649,
0.13051952421665192,
-0.06973199546337128,
0.03461240604519844,
0.047396764159202576,
0.003934831358492374,
0.037755731493234634,
0.08890917897224426,
-0.0019721146672964096,
0.011391771025955677,
0.0837189108133316,
-0.1068313866853714,
-0.09119442850351334,
-0.02313355728983879,
0.03751590847969055,
0.11006053537130356,
0.03051132522523403,
0.13355541229248047,
-0.03843075782060623,
-0.02349076420068741,
-0.04947882518172264,
-0.028117509558796883,
-0.1504010558128357,
0.04494497552514076,
0.07733725756406784,
0.04389898478984833,
-0.11686959862709045,
-0.024535194039344788,
-0.030698545277118683,
-0.10629171133041382,
0.007254380267113447,
-0.03915035352110863,
-0.0760270282626152,
-0.16227421164512634,
-0.12629833817481995,
0.05908539146184921,
-0.11001002043485641,
-0.09731046855449677,
0.06325901299715042,
-0.1110185980796814,
0.008821898140013218,
0.1296229064464569,
0.08721306174993515,
0.06524926424026489,
-0.14028146862983704,
-0.04252591356635094,
0.04095478355884552,
-0.03241032361984253,
-0.007516405079513788,
-0.0663459300994873,
-0.1063365563750267,
0.09696641564369202,
0.0182547215372324,
0.10888571292161942,
-0.10907489061355591,
-0.08413825184106827,
-0.09533492475748062,
0.0742587223649025,
-0.17228174209594727,
-0.03686191514134407,
-0.11839939653873444,
-0.019397787749767303,
0.052220847457647324,
-0.0750964805483818,
-0.05174725130200386,
0.00314967124722898,
-0.14319871366024017,
0.0708819106221199,
-0.013260525651276112,
0.0363905094563961,
-0.07985072582960129,
0.024278810247778893,
0.04325252026319504,
-0.029213296249508858,
0.07775893062353134,
0.19990959763526917,
-0.14924731850624084,
0.10904547572135925,
-0.12039992213249207,
-0.12007620930671692,
0.08155656605958939,
0.04603108391165733,
0.05010863393545151,
-0.009226856753230095,
0.010532091371715069,
0.07986065000295639,
0.0565689280629158,
0.008975248783826828,
0.13580963015556335,
-0.048855461180210114,
0.0591491162776947,
-0.021293548867106438,
-0.08196473121643066,
0.0034934289287775755,
-0.04101786017417908,
0.08425158262252808,
0.09296426177024841,
0.0530308373272419,
-0.02404419146478176,
0.0602082833647728,
0.009954490698873997,
0.06297457963228226,
-0.09766310453414917,
-0.058397967368364334,
0.0017010924639180303,
-0.1149732694029808,
0.02868911810219288,
-0.03445316478610039,
0.31077465415000916,
0.02406521700322628,
0.09689889848232269,
-0.012704682536423206,
0.03339372202754021,
-0.022320294752717018,
0.026361403986811638,
0.28254321217536926,
0.08619052171707153,
0.02972693182528019,
-0.08472433686256409,
0.05374154821038246,
0.019960317760705948,
0.05461052060127258,
-0.06914269179105759,
0.1477220356464386,
0.0004169055027887225,
0.161441832780838,
0.08504056185483932,
-0.025822602212429047,
-0.11634687334299088,
-0.13491134345531464,
-0.02108769491314888,
0.04009724035859108,
-0.13352912664413452,
0.04716862738132477,
0.14726927876472473,
-0.019542457535862923,
0.056865572929382324,
-0.015263020992279053,
-0.024152567610144615,
-0.16835054755210876,
-0.1113729476928711,
-0.06429774314165115,
-0.1562766283750534,
-0.019899778068065643,
-0.06868623197078705,
0.04550342634320259,
0.06901273876428604,
0.052924200892448425,
-0.009547594003379345,
0.11870699375867844,
-0.039900172501802444,
-0.0672885924577713,
0.05728130415081978,
-0.025267023593187332,
0.043157126754522324,
-0.04051828011870384,
-0.0012900256551802158,
-0.03725013881921768,
-0.0036901559215039015,
-0.004493960179388523,
0.00901018362492323,
-0.12058661878108978,
-0.018879251554608345,
-0.08181025087833405,
-0.0466916486620903,
-0.09973856061697006,
-0.007444834802299738,
-0.0023623299784958363,
0.12552790343761444,
0.04034178704023361,
-0.09013421088457108,
-0.0213156845420599,
0.19663746654987335,
-0.14585576951503754,
-0.1983063519001007,
-0.060555797070264816,
0.2386734038591385,
0.04274589940905571,
0.13851799070835114,
-0.06495395302772522,
-0.01585833728313446,
-0.1207629069685936,
0.3061027526855469,
0.30868634581565857,
-0.08475799858570099,
0.07288450002670288,
0.05102105066180229,
0.04086671397089958,
0.04316530376672745,
-0.00002310849231434986,
0.0951421856880188,
0.29498761892318726,
-0.05289041996002197,
-0.027323465794324875,
-0.06335887312889099,
-0.054028332233428955,
-0.0403280071914196,
0.05857134610414505,
-0.009126032702624798,
-0.12757092714309692,
-0.029525544494390488,
0.09027303010225296,
-0.21515251696109772,
0.11078815907239914,
-0.032654643058776855,
-0.13363830745220184,
-0.04089254140853882,
0.017599821090698242,
0.09252285957336426,
0.04992211237549782,
0.08632275462150574,
-0.06598566472530365,
-0.09164350479841232,
0.0805046558380127,
0.037150487303733826,
-0.19401578605175018,
0.03754609450697899,
0.06103724241256714,
-0.023955514654517174,
-0.010921783745288849,
0.0023282684851437807,
0.0658923014998436,
0.0385051853954792,
0.12565383315086365,
0.027140973135828972,
0.1537025421857834,
0.0058655645698308945,
-0.09518881142139435,
0.008974296040832996,
0.047800976783037186,
-0.03270869329571724,
-0.03860403597354889,
0.010653484612703323,
-0.27797478437423706,
0.09113006293773651,
-0.013058957643806934,
-0.026218395680189133,
-0.032628025859594345,
-0.020102104172110558,
-0.04513247311115265,
0.06231781095266342,
0.012721139006316662,
-0.010963412933051586,
-0.07055005431175232,
0.005820227321237326,
0.06350240856409073,
-0.006245844066143036,
-0.09239339828491211,
-0.13312841951847076,
-0.14528027176856995,
-0.08218343555927277,
-0.04653673619031906,
-0.0013480675406754017,
-0.10029658675193787,
-0.009050268679857254,
0.03277643769979477,
0.038988884538412094,
0.006692338269203901,
0.03783223778009415,
0.11002694815397263,
-0.00606278283521533,
0.0019004064379259944,
0.0003056168498005718,
0.08056733757257462,
0.10057663917541504,
-0.16002751886844635,
-0.11340176314115524
] |
null | null |
transformers
|
# wav2vec2-large-xls-r-300m-hindi-colab
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the common_voice dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
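The training script for this run is not included in the card, so the snippet below is only an illustrative sketch: it shows how the hyperparameters listed above would typically be expressed with `transformers.TrainingArguments` (the `output_dir` value is a placeholder, and the dataset, model, and collator setup are omitted).
```python
from transformers import TrainingArguments

# Sketch of the reported hyperparameters as TrainingArguments; not the original script.
training_args = TrainingArguments(
    output_dir="wav2vec2-large-xls-r-300m-hindi-colab",  # placeholder
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=2,  # effective train batch size of 32
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=30,
    fp16=True,  # mixed precision training (native AMP)
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```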
### Training results
### Framework versions
- Transformers 4.12.5
- Pytorch 1.10.0+cu111
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["common_voice"], "model-index": [{"name": "wav2vec2-large-xls-r-300m-hindi-colab", "results": []}]}
|
automatic-speech-recognition
|
addy88/wav2vec2-large-xls-r-300m-hindi-colab
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"dataset:common_voice",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us
|
# wav2vec2-large-xls-r-300m-hindi-colab
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
### Training results
### Framework versions
- Transformers 4.12.5
- Pytorch 1.10.0+cu111
- Datasets 1.16.1
- Tokenizers 0.10.3
|
[
"# wav2vec2-large-xls-r-300m-hindi-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.12.5\n- Pytorch 1.10.0+cu111\n- Datasets 1.16.1\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n",
"# wav2vec2-large-xls-r-300m-hindi-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.12.5\n- Pytorch 1.10.0+cu111\n- Datasets 1.16.1\n- Tokenizers 0.10.3"
] |
[
65,
52,
6,
12,
8,
3,
140,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n# wav2vec2-large-xls-r-300m-hindi-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP### Training results### Framework versions\n\n- Transformers 4.12.5\n- Pytorch 1.10.0+cu111\n- Datasets 1.16.1\n- Tokenizers 0.10.3"
] |
[
-0.10074292123317719,
0.16276007890701294,
-0.0013576173223555088,
0.0413646325469017,
0.11980830132961273,
0.016988929361104965,
0.0827861875295639,
0.13327908515930176,
-0.0798683613538742,
0.09442505240440369,
0.06480737775564194,
0.024332642555236816,
0.08932729065418243,
0.10428764671087265,
-0.0023875331971794367,
-0.2460201531648636,
-0.00220339372754097,
0.012763036414980888,
-0.055332183837890625,
0.09265413880348206,
0.11813727021217346,
-0.08080073446035385,
0.029399974271655083,
0.03628964722156525,
-0.14603236317634583,
0.031234418973326683,
-0.07104325294494629,
-0.059826310724020004,
0.0723809227347374,
0.026462223380804062,
0.04384564608335495,
0.0029327247757464647,
0.10230359435081482,
-0.27575889229774475,
0.0038718469440937042,
0.05138951912522316,
0.029608523473143578,
0.05820266157388687,
0.07807251811027527,
0.01517556793987751,
0.12556248903274536,
-0.1436685025691986,
0.09749427437782288,
0.038370247930288315,
-0.041268568485975266,
-0.17451691627502441,
-0.08272092789411545,
0.09399354457855225,
0.11589109897613525,
0.12636181712150574,
-0.026975782588124275,
0.12951669096946716,
-0.06920482963323593,
0.06293196231126785,
0.14907550811767578,
-0.2532622516155243,
-0.06117981672286987,
0.020610621199011803,
0.052418649196624756,
0.05321933701634407,
-0.12151505053043365,
-0.01838238723576069,
0.051275067031383514,
0.021650709211826324,
0.04941964149475098,
0.01774417981505394,
-0.008854464627802372,
-0.008502544835209846,
-0.1068759635090828,
-0.02807559259235859,
0.16696184873580933,
0.10120373219251633,
-0.03662574291229248,
-0.1609327495098114,
0.004847893491387367,
-0.12844796478748322,
-0.024901965633034706,
-0.026034390553832054,
0.015642136335372925,
-0.02618974633514881,
-0.08080682158470154,
-0.0016670506447553635,
-0.07360190898180008,
-0.03912413865327835,
0.07084400951862335,
0.08982142060995102,
0.03630773723125458,
-0.02765793353319168,
-0.011966291815042496,
0.062505342066288,
0.03021981380879879,
-0.12863855063915253,
-0.02158356085419655,
0.036275241523981094,
-0.11605051159858704,
-0.04880380630493164,
-0.04153769835829735,
-0.07582663744688034,
-0.006499688606709242,
0.11880600452423096,
0.010031752288341522,
0.08598759025335312,
0.014072418212890625,
-0.016948891803622246,
0.005454682745039463,
0.145485520362854,
-0.06412824243307114,
-0.07588353753089905,
-0.029569553211331367,
0.0945797860622406,
0.02201499044895172,
-0.009550330229103565,
-0.06881286203861237,
-0.010621857829391956,
0.09283880889415741,
0.05402904748916626,
-0.05522043630480766,
-0.00965053029358387,
-0.06060657277703285,
-0.032993804663419724,
0.03631464019417763,
-0.1137399896979332,
0.049413587898015976,
-0.017853273078799248,
-0.04669945314526558,
0.003061748808249831,
-0.02088378742337227,
0.03196220472455025,
-0.025272779166698456,
0.0650593563914299,
-0.07785648107528687,
-0.01407332718372345,
-0.06329810619354248,
-0.026830781251192093,
0.03610572591423988,
-0.02079271338880062,
0.010985570959746838,
-0.05122799426317215,
-0.12252401560544968,
-0.050703421235084534,
0.032423779368400574,
-0.07241478562355042,
-0.09027744084596634,
-0.048210494220256805,
-0.012956741265952587,
0.0352640226483345,
-0.009651490487158298,
0.15021809935569763,
-0.038610879331827164,
0.06624620407819748,
0.003099258290603757,
0.01613234542310238,
0.07289711385965347,
0.06176457926630974,
-0.05469343066215515,
0.042989350855350494,
-0.030789991840720177,
0.08800564706325531,
-0.08805590122938156,
0.02696976810693741,
-0.13631728291511536,
-0.10038471221923828,
-0.02287488617002964,
-0.025582199916243553,
0.06000438705086708,
0.09082602709531784,
-0.14996525645256042,
-0.05293991416692734,
0.14399483799934387,
-0.05594588443636894,
-0.11186663806438446,
0.12577779591083527,
-0.01856674626469612,
-0.0002835134800989181,
0.053760774433612823,
0.12263171374797821,
0.12381943315267563,
-0.09983213990926743,
-0.060198646038770676,
-0.021413756534457207,
0.10437343269586563,
0.017515549436211586,
0.09801741689443588,
-0.026637708768248558,
0.03928643465042114,
0.0002079200785374269,
0.0031681726686656475,
0.014915398322045803,
-0.06467309594154358,
-0.08671597391366959,
-0.028259318321943283,
-0.10314186662435532,
0.003005286445841193,
0.03633507713675499,
0.029558327049016953,
-0.08094646036624908,
-0.1329425424337387,
0.06244397908449173,
0.14827847480773926,
-0.06939619034528732,
0.009171911515295506,
-0.09407802671194077,
0.0033676871098577976,
-0.0725262314081192,
-0.023409225046634674,
-0.17091085016727448,
-0.04789213463664055,
0.0547819584608078,
-0.09213975816965103,
0.03790225833654404,
-0.008964672684669495,
0.05858568847179413,
0.0377529002726078,
-0.054625384509563446,
-0.0317704863846302,
-0.09643644839525223,
0.0019900831393897533,
-0.0808911994099617,
-0.13987986743450165,
-0.0752130001783371,
-0.03819575160741806,
0.2461755871772766,
-0.22376318275928497,
-0.00722768809646368,
0.02213815040886402,
0.1514272391796112,
0.008700842037796974,
-0.08123566210269928,
0.02496522292494774,
0.044788945466279984,
0.006164278369396925,
-0.0909714549779892,
0.009551923722028732,
0.010516381822526455,
-0.12885260581970215,
-0.06407323479652405,
-0.10234015434980392,
0.04386790096759796,
0.06774941086769104,
0.10134441405534744,
-0.0687253400683403,
-0.05781043320894241,
-0.05129449442028999,
-0.053730376064777374,
-0.0712747797369957,
-0.009520799852907658,
0.20570842921733856,
0.0329667329788208,
0.09635349363088608,
-0.04786072298884392,
-0.07265463471412659,
0.012447129003703594,
0.030617672950029373,
-0.05647813901305199,
0.07760588824748993,
0.04770466312766075,
-0.1433844417333603,
0.08459470421075821,
0.04391179606318474,
-0.03203640505671501,
0.13632626831531525,
-0.05152979865670204,
-0.1111924797296524,
-0.023391136899590492,
0.004368993919342756,
0.0007999043446034193,
0.08481637388467789,
-0.14267967641353607,
-0.0025763381272554398,
0.036709755659103394,
-0.012616364285349846,
0.030143525451421738,
-0.12834013998508453,
-0.006633191369473934,
0.04982815682888031,
-0.0209873765707016,
-0.02862589806318283,
-0.015740331262350082,
-0.006766794715076685,
0.05018697306513786,
0.034727249294519424,
0.01648271270096302,
0.017148200422525406,
-0.012917733751237392,
-0.09548693895339966,
0.14777854084968567,
-0.10088066011667252,
-0.18847402930259705,
-0.14508970081806183,
0.03209469094872475,
-0.04055865854024887,
-0.04093559458851814,
0.019008753821253777,
-0.132246732711792,
-0.07039380818605423,
-0.07480302453041077,
-0.030064955353736877,
-0.0623609684407711,
-0.0017209129873663187,
0.08287373930215836,
0.007461601868271828,
0.0785679966211319,
-0.12108854204416275,
0.032014694064855576,
0.022336585447192192,
-0.03741653263568878,
-0.02679074928164482,
0.02355513721704483,
0.10107888281345367,
0.1223740205168724,
0.008007775992155075,
0.0320347398519516,
-0.01977415755391121,
0.18930160999298096,
-0.11397191882133484,
-0.00946675892919302,
0.11617214232683182,
0.02793353982269764,
0.033102940768003464,
0.0995607003569603,
0.025883328169584274,
-0.08802899718284607,
0.03338136151432991,
0.060070838779211044,
-0.014056451618671417,
-0.2410263866186142,
-0.052430618554353714,
-0.04862510412931442,
-0.11955143511295319,
0.13320963084697723,
0.05438809096813202,
0.009140659123659134,
0.05308487266302109,
-0.02947169914841652,
0.05023274943232536,
-0.00913232285529375,
0.07742539048194885,
0.04397331178188324,
0.06510703265666962,
0.08669152855873108,
-0.034382209181785583,
-0.020947517827153206,
0.045509107410907745,
0.021117858588695526,
0.22727105021476746,
0.01850503496825695,
0.16554777324199677,
0.015244740061461926,
0.1375390738248825,
-0.005858496762812138,
0.02501363307237625,
0.0043347920291125774,
-0.016459103673696518,
0.03239751607179642,
-0.05225517600774765,
-0.04021237790584564,
0.05472955480217934,
0.09483014792203903,
0.026545559987425804,
-0.07241259515285492,
0.007479042746126652,
-0.0032093944028019905,
0.28249219059944153,
0.061337970197200775,
-0.2765374481678009,
-0.08773058652877808,
0.021543294191360474,
-0.05828307941555977,
-0.06828169524669647,
0.016189860180020332,
0.08021453022956848,
-0.12138457596302032,
0.09591008722782135,
-0.055427901446819305,
0.08999809622764587,
-0.0670376718044281,
-0.019024232402443886,
0.04944983497262001,
0.0702451542019844,
0.006564143113791943,
0.10268517583608627,
-0.1705235093832016,
0.19186648726463318,
0.010866672731935978,
0.10770318657159805,
-0.0782964825630188,
0.043359071016311646,
-0.008594349026679993,
-0.0030109721701592207,
0.0906815156340599,
-0.011774880811572075,
-0.020421098917722702,
-0.16185438632965088,
-0.07936307787895203,
0.035097744315862656,
0.10390254110097885,
-0.04851990193128586,
0.08660080283880234,
-0.03450780361890793,
-0.003104850184172392,
0.038306545466184616,
-0.038837920874357224,
-0.15779909491539001,
-0.19426147639751434,
0.029557926580309868,
0.03186599165201187,
0.04802001267671585,
-0.09371326118707657,
-0.11373051255941391,
-0.051428213715553284,
0.19803881645202637,
0.03867388889193535,
-0.03507089614868164,
-0.1335269808769226,
0.1077294796705246,
0.1397102326154709,
-0.05593549832701683,
0.024387432262301445,
0.03621625900268555,
0.19221824407577515,
0.007532666437327862,
-0.027438001707196236,
0.04339213669300079,
-0.05161034315824509,
-0.12946666777133942,
-0.0360548235476017,
0.17339298129081726,
0.049911268055438995,
0.056486014276742935,
0.019358711317181587,
0.017772862687706947,
0.01457950845360756,
-0.07166730612516403,
0.05010678619146347,
0.04283420741558075,
0.04026659205555916,
0.05496985837817192,
-0.021547812968492508,
0.016475912183523178,
-0.052583806216716766,
-0.06092147156596184,
0.14876066148281097,
0.2182416021823883,
-0.053613800555467606,
0.06671591848134995,
0.07410691678524017,
-0.05022305250167847,
-0.10639090836048126,
0.024737229570746422,
0.12078990042209625,
0.04739634692668915,
0.04152686148881912,
-0.1963861733675003,
0.08896858990192413,
0.11697279661893845,
-0.013937060721218586,
-0.018103502690792084,
-0.28249385952949524,
-0.11697559803724289,
0.08669771999120712,
0.08840826153755188,
-0.07988900691270828,
-0.12371411174535751,
-0.05972975492477417,
-0.07509540766477585,
-0.15600912272930145,
0.06460484862327576,
-0.04086080566048622,
0.09741510450839996,
0.0018828213214874268,
0.08463022857904434,
0.03039519675076008,
-0.04201935976743698,
0.14859138429164886,
0.025074724107980728,
0.03450659289956093,
-0.026739664375782013,
0.06386224925518036,
0.0552586168050766,
-0.04575587436556816,
0.0667792484164238,
-0.08030763268470764,
0.03859885036945343,
-0.16395999491214752,
-0.04806245118379593,
-0.04637530446052551,
0.0271417535841465,
-0.04280909150838852,
-0.04856724664568901,
-0.046640247106552124,
0.06197042018175125,
0.07428072392940521,
-0.02582971565425396,
0.08072136342525482,
0.010933849029242992,
0.1050935834646225,
0.0666620209813118,
0.12795083224773407,
-0.014610870741307735,
-0.12240420281887054,
-0.049039967358112335,
-0.02509637549519539,
0.04715351387858391,
-0.07999429106712341,
0.01898529753088951,
0.10721748322248459,
0.05785540118813515,
0.16722048819065094,
0.0028487076051533222,
-0.07255952805280685,
0.024405112490057945,
0.04855045676231384,
-0.005337525624781847,
-0.19152607023715973,
-0.03178354725241661,
0.04596056789159775,
-0.16816836595535278,
-0.028779905289411545,
0.07955358922481537,
-0.05115750432014465,
-0.03197959437966347,
-0.010891824029386044,
0.0276581309735775,
-0.03752698376774788,
0.17468298971652985,
0.015052955597639084,
0.0825587585568428,
-0.07447320967912674,
0.09420406818389893,
0.09752045571804047,
-0.12177842110395432,
0.05965191125869751,
0.047856613993644714,
-0.061755623668432236,
-0.018596015870571136,
0.03977421671152115,
0.09949860721826553,
0.011362452059984207,
-0.03898167982697487,
-0.06017598882317543,
-0.10784602910280228,
0.05516601726412773,
-0.018062913790345192,
0.01237307209521532,
-0.02384672872722149,
-0.04789367690682411,
0.020157866179943085,
-0.15076152980327606,
0.08035369217395782,
0.06224710866808891,
0.05869418755173683,
-0.14414678514003754,
0.051880232989788055,
0.01791015826165676,
0.010581503622233868,
0.005763687659054995,
0.000306821457343176,
-0.05187975987792015,
-0.006951458752155304,
-0.13394096493721008,
-0.03016768768429756,
-0.0587238147854805,
0.015158751048147678,
-0.020815104246139526,
-0.03451851010322571,
-0.04977601766586304,
0.04024776071310043,
-0.061448659747838974,
-0.08812680840492249,
0.007641669362783432,
0.08617004007101059,
-0.1102977842092514,
0.009729823097586632,
0.05070044845342636,
-0.10336646437644958,
0.06405112892389297,
0.04771079868078232,
0.03455617278814316,
0.022308997809886932,
-0.05433857813477516,
-0.011459888890385628,
0.03266710042953491,
0.033003922551870346,
0.05059285834431648,
-0.15369395911693573,
-0.00882874894887209,
0.0005015170900151134,
0.006235970184206963,
0.014010551385581493,
0.03519819676876068,
-0.10262259840965271,
-0.06587424874305725,
-0.0948442816734314,
-0.04009639099240303,
-0.06241166219115257,
0.06258703768253326,
0.115057572722435,
0.03166837990283966,
0.15788568556308746,
-0.069747194647789,
0.04872722923755646,
-0.20429793000221252,
-0.01986984722316265,
-0.021737653762102127,
0.004212364554405212,
-0.04551345482468605,
-0.025967568159103394,
0.06790900230407715,
-0.04536743834614754,
0.097409687936306,
-0.06444147974252701,
0.07246445119380951,
0.04837222397327423,
-0.0452033095061779,
0.0028082793578505516,
-0.0010717598488554358,
0.223109170794487,
0.09120118618011475,
-0.008785954676568508,
0.10612618923187256,
-0.048050522804260254,
0.05071153864264488,
0.10172717273235321,
0.07542764395475388,
0.15900662541389465,
0.0007026295643299818,
0.05393802374601364,
0.0675397515296936,
-0.1217353492975235,
-0.1486717015504837,
0.12371756136417389,
-0.04233958199620247,
0.10523264855146408,
-0.00697786919772625,
0.1762869954109192,
0.12748296558856964,
-0.18510091304779053,
0.046114224940538406,
-0.05390224978327751,
-0.10742765665054321,
-0.048507459461688995,
-0.08720122277736664,
-0.09189919382333755,
-0.12344080954790115,
0.043223969638347626,
-0.10617014020681381,
0.018051758408546448,
0.060811955481767654,
0.02656709775328636,
0.013569587841629982,
0.1557879000902176,
-0.032685574144124985,
0.002469089813530445,
0.08174329996109009,
-0.0012914460385218263,
-0.020978739485144615,
-0.06861459463834763,
-0.04144971817731857,
0.05607491731643677,
-0.0017440173542127013,
0.10569508373737335,
-0.04606013745069504,
-0.03618936240673065,
0.04173356667160988,
0.021001657471060753,
-0.08547695726156235,
0.032443057745695114,
-0.006254284642636776,
0.05098884925246239,
0.07266619056463242,
0.04401744157075882,
0.008918209001421928,
-0.054353661835193634,
0.21340996026992798,
-0.053369514644145966,
-0.0464043915271759,
-0.14468206465244293,
0.11902524530887604,
0.029377102851867676,
-0.01312948390841484,
0.07420539110898972,
-0.10625476390123367,
-0.009577976539731026,
0.10905493795871735,
0.09758298099040985,
-0.020168209448456764,
-0.02171820029616356,
-0.003964737989008427,
-0.020516540855169296,
-0.07549731433391571,
0.08953522890806198,
0.10581820458173752,
0.002256048144772649,
-0.0457647442817688,
0.03055892325937748,
-0.02497754991054535,
-0.07381241023540497,
-0.049291349947452545,
0.09368439763784409,
0.005033773835748434,
0.003956886474043131,
-0.01521573681384325,
0.12598557770252228,
0.010563354007899761,
-0.16759201884269714,
0.011148686520755291,
-0.14301864802837372,
-0.20868778228759766,
-0.016342930495738983,
0.03530609607696533,
-0.0031633125618100166,
0.04734983667731285,
0.016285112127661705,
0.0026066324207931757,
0.12259012460708618,
0.01420561596751213,
-0.04351721331477165,
-0.10344372689723969,
0.10951997339725494,
-0.05852997675538063,
0.1988963633775711,
-0.0026834586169570684,
0.05964335426688194,
0.10898007452487946,
0.04792380332946777,
-0.12885481119155884,
0.03444276005029678,
0.08317559212446213,
-0.04954582452774048,
0.05875198915600777,
0.19506283104419708,
-0.052924372255802155,
0.12284456938505173,
0.04861612990498543,
-0.11389711499214172,
-0.016893548890948296,
-0.09864786267280579,
0.03661476820707321,
-0.09335754811763763,
0.03146469593048096,
-0.04968868941068649,
0.16908209025859833,
0.1780908852815628,
-0.06474525481462479,
-0.041371021419763565,
-0.04736536368727684,
0.03881880268454552,
0.04773365706205368,
0.13322889804840088,
-0.025277795270085335,
-0.22552640736103058,
0.00855438131839037,
-0.03750187158584595,
0.03009253554046154,
-0.2541876435279846,
-0.10077062249183655,
0.051029838621616364,
-0.06800205260515213,
-0.02754460647702217,
0.11484227329492569,
0.07688993215560913,
0.018856219947338104,
-0.05015762895345688,
-0.14375591278076172,
-0.037585679441690445,
0.1288793683052063,
-0.16801677644252777,
-0.03757749870419502
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-maithili-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-maithili-stt")
    # load audio
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # inference: retrieve logits and take the argmax over the vocabulary
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file with this model.")
    parser.add_argument("wav_file", help="path to a 16 kHz mono .wav file")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
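Note that wav2vec2 processors are configured for a fixed sampling rate (16 kHz for this family of checkpoints) and will reject a mismatched `sampling_rate`. If your audio uses a different rate, one way to resample it first is sketched below; this assumes mono audio and that `librosa` is installed, neither of which is required by the snippet above.
```python
import librosa
import soundfile as sf

audio_input, sample_rate = sf.read("example.wav")  # hypothetical mono input file
if sample_rate != 16000:
    # resample to the 16 kHz rate the feature extractor expects
    audio_input = librosa.resample(audio_input, orig_sr=sample_rate, target_sr=16000)
    sample_rate = 16000
```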
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-maithili-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-malayalam-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-malayalam-stt")
    # load audio
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # inference: retrieve logits and take the argmax over the vocabulary
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file with this model.")
    parser.add_argument("wav_file", help="path to a 16 kHz mono .wav file")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
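Since the snippet above only runs inference, the forward pass can also be wrapped in `torch.no_grad()` to skip autograd bookkeeping; a small variation on the lines above:
```python
import torch

# disable gradient tracking for inference to reduce memory use
with torch.no_grad():
    logits = model(input_values).logits
predicted_ids = torch.argmax(logits, dim=-1)
```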
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-malayalam-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

def parse_transcription(wav_file):
    # load the pretrained processor and model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-marathi-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-marathi-stt")
    # load audio; wav2vec2 checkpoints typically expect 16 kHz mono input
    audio_input, sample_rate = sf.read(wav_file)
    # normalize/pad the waveform and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # inference: retrieve logits and take the argmax over the vocabulary
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # decode the predicted token ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
```
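As a quick check, the function above can then be called on any 16 kHz mono WAV file (the path below is only a placeholder):

```python
# "sample_mr.wav" is a hypothetical example file; wav2vec2 checkpoints typically expect 16 kHz mono audio
parse_transcription("sample_mr.wav")
```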
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-marathi-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

def parse_transcription(wav_file):
    # load the pretrained processor and model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-nepali-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-nepali-stt")
    # load audio; wav2vec2 checkpoints typically expect 16 kHz mono input
    audio_input, sample_rate = sf.read(wav_file)
    # normalize/pad the waveform and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # inference: retrieve logits and take the argmax over the vocabulary
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # decode the predicted token ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
```
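If the recording is not already sampled at 16 kHz, it should be resampled before feature extraction. A minimal sketch using librosa (not part of the original snippet; the file name is a placeholder) would be:

```python
import librosa

# librosa resamples to 16 kHz and converts to mono while loading;
# pass the resulting array to the processor with sampling_rate=16000
audio_input, sample_rate = librosa.load("speech_ne.wav", sr=16000)
```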
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-nepali-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

def parse_transcription(wav_file):
    # load the pretrained processor and model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-punjabi-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-punjabi-stt")
    # load audio; wav2vec2 checkpoints typically expect 16 kHz mono input
    audio_input, sample_rate = sf.read(wav_file)
    # normalize/pad the waveform and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # inference: retrieve logits and take the argmax over the vocabulary
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # decode the predicted token ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
```
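For inference-only use, wrapping the forward pass in `torch.no_grad()` avoids building the autograd graph. A hedged variant of the inference step from the snippet above (it reuses the `model` and `input_values` defined there):

```python
# disable gradient tracking during the forward pass (inference only)
with torch.no_grad():
    logits = model(input_values).logits
predicted_ids = torch.argmax(logits, dim=-1)
```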
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-punjabi-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-rajsthani-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-rajsthani-stt")

    # load audio (the model expects 16 kHz mono audio)
    audio_input, sample_rate = sf.read(wav_file)

    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values

    # inference: retrieve logits and take the argmax over the vocabulary
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)

    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file.")
    parser.add_argument("wav_file", help="path to the audio file to transcribe")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
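If your audio is not already sampled at 16 kHz, resample it before passing it to the processor. Below is a minimal resampling sketch; it assumes the `librosa` package is installed, and the file name is a placeholder.
```python
import librosa

# minimal resampling sketch (assumes librosa is installed);
# "recording.wav" is a placeholder for your own file
audio_input, sample_rate = librosa.load("recording.wav", sr=16000, mono=True)
# audio_input is now a 16 kHz mono float array suitable for the processor above
```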
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-rajsthani-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-sanskrit-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-sanskrit-stt")

    # load audio (the model expects 16 kHz mono audio)
    audio_input, sample_rate = sf.read(wav_file)

    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values

    # inference: retrieve logits and take the argmax over the vocabulary
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)

    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file.")
    parser.add_argument("wav_file", help="path to the audio file to transcribe")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
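To transcribe several files in one forward pass, the processor can pad a batch of arrays and decode all predictions at once. A rough sketch, assuming the inputs are already 16 kHz mono recordings and using placeholder file names:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-sanskrit-stt")
model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-sanskrit-stt")

# placeholder file names; each should be a 16 kHz mono recording
wav_files = ["clip1.wav", "clip2.wav"]
arrays = [sf.read(f)[0] for f in wav_files]

# pad the batch to a common length and run a single forward pass
inputs = processor(arrays, sampling_rate=16000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values).logits
predicted_ids = torch.argmax(logits, dim=-1)

# decode every sequence in the batch at once
transcriptions = processor.batch_decode(predicted_ids, skip_special_tokens=True)
print(transcriptions)
```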
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-sanskrit-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\n\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\n\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\n\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-tamil-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-tamil-stt")

    # load audio (the model expects 16 kHz mono audio)
    audio_input, sample_rate = sf.read(wav_file)

    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values

    # inference: retrieve logits and take the argmax over the vocabulary
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)

    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file.")
    parser.add_argument("wav_file", help="path to the audio file to transcribe")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
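Inference is noticeably faster on a GPU. A minimal sketch that moves the model and inputs to CUDA when available (the file name is a placeholder):
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

device = "cuda" if torch.cuda.is_available() else "cpu"
processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-tamil-stt")
model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-tamil-stt").to(device)

# "recording.wav" is a placeholder for a 16 kHz mono file
audio_input, sample_rate = sf.read("recording.wav")
input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values.to(device)

with torch.no_grad():
    logits = model(input_values).logits
predicted_ids = torch.argmax(logits, dim=-1)
print(processor.decode(predicted_ids[0], skip_special_tokens=True))
```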
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-tamil-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import argparse

import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor


def parse_transcription(wav_file):
    # load the pretrained model and its processor
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-telugu-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-telugu-stt")

    # load audio (the model expects 16 kHz mono audio)
    audio_input, sample_rate = sf.read(wav_file)

    # pad input values and return a PyTorch tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values

    # inference: retrieve logits and take the argmax over the vocabulary
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)

    # decode the predicted ids into text
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
    return transcription


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Transcribe a WAV file.")
    parser.add_argument("wav_file", help="path to the audio file to transcribe")
    args = parser.parse_args()
    parse_transcription(args.wav_file)
```
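Long recordings may not fit in memory in a single forward pass. One simple workaround (a sketch, not a method prescribed by the model author) is to split the waveform into fixed-length chunks and transcribe each chunk separately; words that fall on a chunk boundary may be cut.
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-telugu-stt")
model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-telugu-stt")

# "long_recording.wav" is a placeholder for a 16 kHz mono file
audio, sample_rate = sf.read("long_recording.wav")
chunk_size = 30 * sample_rate  # roughly 30-second chunks

pieces = []
for start in range(0, len(audio), chunk_size):
    chunk = audio[start:start + chunk_size]
    input_values = processor(chunk, sampling_rate=sample_rate, return_tensors="pt").input_values
    with torch.no_grad():
        logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    pieces.append(processor.decode(predicted_ids[0], skip_special_tokens=True))

print(" ".join(pieces))
```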
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-telugu-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
## Usage
The model can be used directly (without a language model) as follows:
```python
import soundfile as sf
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import argparse
def parse_transcription(wav_file):
    # load pretrained model
    processor = Wav2Vec2Processor.from_pretrained("addy88/wav2vec2-urdu-stt")
    model = Wav2Vec2ForCTC.from_pretrained("addy88/wav2vec2-urdu-stt")
    # load audio
    audio_input, sample_rate = sf.read(wav_file)
    # pad input values and return pt tensor
    input_values = processor(audio_input, sampling_rate=sample_rate, return_tensors="pt").input_values
    # INFERENCE
    # retrieve logits & take argmax
    logits = model(input_values).logits
    predicted_ids = torch.argmax(logits, dim=-1)
    # transcribe
    transcription = processor.decode(predicted_ids[0], skip_special_tokens=True)
    print(transcription)
```
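Wav2Vec2 checkpoints are generally trained on 16 kHz audio, so input recorded at another sampling rate should be resampled before it reaches the processor. The helper below is a minimal sketch of one way to do that, assuming `torchaudio` is available; it is not part of the original card.
```python
import torchaudio

def load_and_resample(wav_file, target_rate=16000):
    # read the waveform and, if needed, resample it to the expected rate
    speech, rate = torchaudio.load(wav_file)
    if rate != target_rate:
        speech = torchaudio.transforms.Resample(rate, target_rate)(speech)
    return speech.squeeze().numpy(), target_rate
```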
|
{}
|
automatic-speech-recognition
|
addy88/wav2vec2-urdu-stt
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
## Usage
The model can be used directly (without a language model) as follows:
|
[
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n",
"## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
37,
20
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n## Usage\nThe model can be used directly (without a language model) as follows:"
] |
[
-0.05252622812986374,
-0.019617876037955284,
-0.0058790817856788635,
-0.053425274789333344,
0.10687846690416336,
-0.007705265656113625,
0.09170906990766525,
0.06302708387374878,
0.1208203136920929,
-0.05187021568417549,
0.10668445378541946,
0.2061861753463745,
-0.01615987904369831,
0.01154998317360878,
-0.02408706024289131,
-0.2849704623222351,
0.05921001359820366,
0.006724327802658081,
0.08173583447933197,
0.11632698774337769,
0.10728606581687927,
-0.0745929405093193,
0.043392978608608246,
0.07603111863136292,
-0.09562446177005768,
0.044834449887275696,
0.017546148970723152,
-0.12676472961902618,
0.12212342768907547,
0.05740610510110855,
0.09601490944623947,
0.003504887456074357,
0.03245728462934494,
-0.23084574937820435,
0.004520881455391645,
-0.04129945859313011,
-0.005402150098234415,
-0.015265563502907753,
0.05411279946565628,
-0.10321149230003357,
0.1183098778128624,
0.09698744118213654,
0.008157354779541492,
0.045819319784641266,
-0.06994444131851196,
-0.09883728623390198,
0.09624841809272766,
0.007222834043204784,
0.06262826174497604,
0.10634984821081161,
-0.05064747855067253,
0.13851769268512726,
-0.10710646957159042,
0.08310505002737045,
0.04849497973918915,
-0.28126662969589233,
0.029068727046251297,
0.07243984192609787,
0.07708492875099182,
0.01645354926586151,
-0.02037384919822216,
0.10069116950035095,
-0.02416316792368889,
0.04858396574854851,
-0.041516125202178955,
-0.05117788165807724,
-0.13475511968135834,
0.03324274718761444,
-0.11539667844772339,
-0.040561530739068985,
0.15148606896400452,
-0.050762925297021866,
0.03511665016412735,
-0.11290891468524933,
-0.06844023615121841,
0.044039443135261536,
-0.07302548736333847,
-0.07782286405563354,
-0.02189541794359684,
0.08985205739736557,
0.006181041710078716,
-0.08480656892061234,
-0.10149180144071579,
-0.07703463733196259,
-0.15133780241012573,
0.2693607807159424,
0.019680902361869812,
0.08886801451444626,
-0.2036275714635849,
0.03727252781391144,
-0.06865793466567993,
-0.03657369315624237,
-0.010131645016372204,
-0.08290375769138336,
-0.003481371561065316,
0.041202232241630554,
-0.10632151365280151,
-0.008017139509320259,
0.08221583068370819,
-0.027638090774416924,
0.06801564246416092,
0.05372175574302673,
-0.025688808411359787,
0.08469673246145248,
0.02976100891828537,
0.1266242116689682,
-0.08318299055099487,
-0.01359215285629034,
-0.006264910567551851,
-0.12346780300140381,
-0.02692805789411068,
-0.026080166921019554,
-0.14001379907131195,
-0.0929582342505455,
-0.03027261234819889,
0.08267602324485779,
-0.03435491770505905,
0.04342223331332207,
-0.017561985179781914,
-0.07181911915540695,
-0.022716619074344635,
-0.0683937594294548,
-0.025212876498699188,
0.08283547312021255,
0.028712647035717964,
0.19050057232379913,
0.03218787536025047,
0.02990150637924671,
-0.10521915555000305,
-0.06501829624176025,
-0.001435491838492453,
0.0477556511759758,
-0.0038637355901300907,
-0.0636293813586235,
-0.050521429628133774,
-0.047594863921403885,
0.04504020884633064,
-0.16827283799648285,
-0.08006849139928818,
0.006139486562460661,
0.016433287411928177,
-0.012976926751434803,
0.03135204687714577,
-0.11849917471408844,
0.039808642119169235,
-0.0012580605689436197,
-0.05513211712241173,
-0.023246455937623978,
-0.04426079988479614,
0.04696167632937431,
0.008784018456935883,
0.08877347409725189,
-0.1122790277004242,
0.07852248102426529,
-0.08129951357841492,
-0.04550950974225998,
-0.01164686307311058,
0.10328144580125809,
-0.017674513161182404,
0.05000694468617439,
-0.08133397996425629,
-0.045672718435525894,
-0.1159110814332962,
0.0878932997584343,
-0.03355850651860237,
0.15796400606632233,
-0.15420709550380707,
-0.10080987215042114,
0.21767915785312653,
-0.09306889027357101,
-0.09239067882299423,
0.08364612609148026,
0.01256752572953701,
0.07035309821367264,
0.08287971466779709,
0.22752153873443604,
0.028992680832743645,
-0.11375638842582703,
0.15816456079483032,
0.1461673527956009,
-0.11467607319355011,
-0.045014068484306335,
0.010342531837522984,
-0.060074593871831894,
-0.11382783204317093,
0.03658890351653099,
-0.03728248178958893,
0.08358000218868256,
-0.04293033108115196,
-0.0835445374250412,
-0.01683056727051735,
-0.11543910205364227,
0.07083569467067719,
0.001472658826969564,
0.10082688927650452,
0.03857358917593956,
0.009115398861467838,
0.008496548049151897,
0.06438606232404709,
-0.08900038152933121,
0.04502258822321892,
-0.15757586061954498,
0.06532309949398041,
-0.04050298035144806,
0.015240225940942764,
-0.202682763338089,
0.12728376686573029,
-0.01672365702688694,
0.10747754573822021,
0.054517317563295364,
0.12177800387144089,
0.08942614495754242,
-0.03367462754249573,
0.03980250656604767,
0.0010367206996306777,
0.16547656059265137,
0.035678017884492874,
-0.03053026832640171,
-0.04889436811208725,
0.00681394012644887,
-0.055863723158836365,
-0.013186636380851269,
-0.048216186463832855,
-0.04109345003962517,
-0.011756274849176407,
0.0651380866765976,
-0.03668820485472679,
0.0516076385974884,
-0.005080144386738539,
0.025842025876045227,
-0.0025852490216493607,
0.040376920253038406,
0.09369415789842606,
-0.044856440275907516,
-0.09525914490222931,
0.21410244703292847,
-0.13405893743038177,
0.18770310282707214,
0.20924758911132812,
-0.2815588414669037,
0.044230442494153976,
0.062409620732069016,
0.011556003242731094,
0.03797592222690582,
0.04959757253527641,
0.006467180326581001,
0.293790340423584,
0.0155835235491395,
0.11668486893177032,
-0.05374575033783913,
0.032668016850948334,
0.02992015704512596,
-0.05695001780986786,
-0.01879408396780491,
0.034647136926651,
0.0661093145608902,
-0.08204877376556396,
0.04923979938030243,
0.11010090261697769,
-0.0453057736158371,
0.148123100399971,
0.024933019652962685,
-0.039744652807712555,
0.03881930187344551,
-0.02343410812318325,
-0.06886041164398193,
-0.009936339221894741,
-0.33419716358184814,
-0.09599199891090393,
0.07709821313619614,
-0.004785764962434769,
0.12489327788352966,
-0.08804792165756226,
0.009168051183223724,
0.019291682168841362,
-0.06878803670406342,
-0.0647115707397461,
0.05472118407487869,
-0.002762231044471264,
0.04009811580181122,
-0.03818083554506302,
-0.1263684183359146,
0.044684890657663345,
-0.0302386824041605,
-0.11999684572219849,
0.12441056966781616,
-0.10402670502662659,
-0.24305014312267303,
-0.10676499456167221,
-0.11100881546735764,
-0.012243036180734634,
0.0840906873345375,
0.06344853341579437,
-0.09637530148029327,
-0.031256671994924545,
0.023415599018335342,
0.03565197065472603,
-0.07387632876634598,
-0.0077576665207743645,
-0.00415276363492012,
0.007432916201651096,
-0.03729229420423508,
-0.11296577751636505,
-0.04147093743085861,
-0.06601216644048691,
-0.027051186189055443,
0.034905724227428436,
-0.10912566632032394,
0.023992136120796204,
0.2160838544368744,
0.049506090581417084,
0.10071378946304321,
0.017306605353951454,
0.1917818933725357,
-0.045892760157585144,
-0.1069929301738739,
0.16272033751010895,
-0.0352884940803051,
0.0028067543171346188,
0.12233041226863861,
0.04503423348069191,
-0.0888904333114624,
-0.057114340364933014,
-0.07670506834983826,
-0.08987729251384735,
-0.14921152591705322,
-0.18451803922653198,
-0.07496201992034912,
-0.10192596167325974,
0.008611632511019707,
0.01191422063857317,
0.08689485490322113,
0.04952683299779892,
0.05695516616106033,
-0.08443386107683182,
0.0356837660074234,
0.03766658902168274,
0.2046319842338562,
-0.05954502522945404,
0.1242118775844574,
-0.044658876955509186,
-0.1400134563446045,
0.020373381674289703,
0.03893480822443962,
0.12193314731121063,
0.17272241413593292,
0.03627229854464531,
0.0488450825214386,
0.10247788578271866,
0.14994199573993683,
0.17463889718055725,
0.0029014565516263247,
-0.020511463284492493,
-0.0009147358941845596,
-0.05476776137948036,
-0.07201310247182846,
0.09347954392433167,
0.2079763263463974,
-0.1406993716955185,
-0.009409161284565926,
-0.10983754694461823,
0.06955263763666153,
0.1199074313044548,
0.05642293393611908,
-0.20243626832962036,
0.03341635689139366,
0.04153240844607353,
-0.06629457324743271,
-0.07251711189746857,
0.1465679109096527,
0.012260537594556808,
-0.11557546257972717,
0.05262777954339981,
0.04174318537116051,
0.07618976384401321,
-0.026631271466612816,
0.08335631340742111,
-0.15152700245380402,
-0.13310033082962036,
0.0689893513917923,
0.09733282774686813,
-0.22627536952495575,
0.2599017024040222,
-0.0082806795835495,
0.015054757706820965,
-0.09506656974554062,
-0.03614956513047218,
0.026938162744045258,
0.13990642130374908,
0.16889335215091705,
-0.005832785740494728,
0.018060486763715744,
-0.06294215470552444,
-0.02094322256743908,
0.0794750228524208,
0.1289294809103012,
0.0012923552421852946,
-0.025144003331661224,
-0.01851782575249672,
-0.048820991069078445,
-0.0035712714307010174,
-0.044398605823516846,
-0.07460972666740417,
-0.11215569823980331,
0.010362415574491024,
0.1303505152463913,
0.10614973306655884,
0.033407822251319885,
-0.00502749951556325,
-0.08575832098722458,
0.10764406621456146,
-0.11254726350307465,
-0.035979606211185455,
-0.06864117085933685,
-0.16859184205532074,
0.1345091015100479,
-0.0476752370595932,
0.06849204748868942,
-0.0048405323177576065,
0.032381389290094376,
-0.05020912364125252,
-0.1380978226661682,
0.09602635353803635,
-0.12306658923625946,
0.01976417936384678,
0.0015626787208020687,
0.15944983065128326,
0.025481410324573517,
-0.007206962909549475,
0.10792868584394455,
0.0020110411569476128,
-0.09983616322278976,
-0.0903809517621994,
-0.032549209892749786,
0.15346577763557434,
-0.10951274633407593,
0.015927769243717194,
0.018179042264819145,
-0.14347873628139496,
-0.08231712877750397,
0.03777335211634636,
0.27825725078582764,
-0.014915425330400467,
-0.04817360267043114,
0.17336682975292206,
0.266446590423584,
-0.052146051079034805,
-0.22492991387844086,
-0.1739804446697235,
-0.04863237589597702,
0.0378878153860569,
-0.0715700313448906,
-0.11964539438486099,
0.10777436196804047,
-0.09367436915636063,
-0.04155983403325081,
-0.02952319197356701,
-0.18605610728263855,
-0.10452525317668915,
0.2878834009170532,
0.008731041103601456,
0.31272685527801514,
-0.034225042909383774,
-0.08491139113903046,
-0.0462961308658123,
-0.1168622300028801,
0.11990545690059662,
-0.028133777901530266,
0.07553482055664062,
0.02659502625465393,
0.14482992887496948,
0.07080701738595963,
-0.03058517538011074,
0.09739825129508972,
0.0694439560174942,
-0.04932545870542526,
-0.014205054379999638,
-0.09434179961681366,
0.010993984527885914,
0.041735049337148666,
0.07998612523078918,
0.05293517932295799,
0.031744420528411865,
-0.09471290558576584,
-0.10459836572408676,
-0.11717119812965393,
0.04566803202033043,
0.06686970591545105,
-0.05674618482589722,
0.08453842252492905,
-0.12748411297798157,
0.013795166276395321,
0.046415891498327255,
0.05644693598151207,
-0.14555446803569794,
0.01097900327295065,
0.20175482332706451,
0.19673597812652588,
-0.11582087725400925,
-0.035082004964351654,
-0.03929390013217926,
-0.06582492589950562,
0.12552955746650696,
-0.016348805278539658,
0.04150126501917839,
0.06624189019203186,
0.011761232279241085,
0.044623930007219315,
0.08187094330787659,
-0.0014814226888120174,
0.011807871982455254,
0.0706041157245636,
-0.1026955246925354,
-0.07286912202835083,
-0.03209858015179634,
0.03451545536518097,
0.1219853013753891,
0.05769479647278786,
0.1266922652721405,
-0.0197153240442276,
-0.029428185895085335,
-0.05409902706742287,
-0.02111060544848442,
-0.1510176807641983,
0.06585611402988434,
0.05681091919541359,
0.029282737523317337,
-0.1461006999015808,
-0.0009589263936504722,
-0.06233185529708862,
-0.11463384330272675,
-0.004083676729351282,
-0.024750350043177605,
-0.09568831324577332,
-0.13677221536636353,
-0.11707108467817307,
0.10505697876214981,
-0.13075333833694458,
-0.12530504167079926,
0.0472247488796711,
-0.11614292114973068,
0.01793769747018814,
0.14788326621055603,
0.07750163227319717,
0.07349028438329697,
-0.1580013483762741,
-0.04895436018705368,
0.014872894622385502,
-0.03940632566809654,
0.00006847670010756701,
-0.0814877450466156,
-0.10139909386634827,
0.07051457464694977,
0.030785532668232918,
0.10237010568380356,
-0.1006712019443512,
-0.11157841235399246,
-0.08842196315526962,
0.1053212434053421,
-0.1773672252893448,
-0.011937949806451797,
-0.12276988476514816,
-0.009165732190012932,
0.054897814989089966,
-0.06066597253084183,
-0.034183766692876816,
0.007981495931744576,
-0.13472281396389008,
0.08353378623723984,
0.003475533565506339,
0.013883791863918304,
-0.07277137786149979,
0.027176927775144577,
0.047363173216581345,
-0.021777629852294922,
0.08778230845928192,
0.23831427097320557,
-0.16073819994926453,
0.15058839321136475,
-0.16987963020801544,
-0.11391031742095947,
0.09087315201759338,
0.04639875516295433,
0.04060527682304382,
-0.008711026981472969,
0.024921422824263573,
0.10381405800580978,
0.030109092593193054,
0.007147052325308323,
0.15466874837875366,
-0.06457111984491348,
0.05303109064698219,
-0.013168524950742722,
-0.08773212879896164,
-0.00940337311476469,
-0.04804586246609688,
0.07879312336444855,
0.09026934206485748,
0.0635034441947937,
-0.04155603051185608,
0.0813690721988678,
0.029936833307147026,
0.060264427214860916,
-0.082431860268116,
-0.04972422868013382,
-0.02518003061413765,
-0.12266892194747925,
0.023915033787488937,
-0.03064400888979435,
0.28028959035873413,
-0.03677959367632866,
0.14348864555358887,
-0.022635672241449356,
0.018306247889995575,
-0.03131228685379028,
0.023785337805747986,
0.3033658564090729,
0.09851063042879105,
0.033345457166433334,
-0.0743090957403183,
0.054648082703351974,
0.01324823684990406,
0.03891727328300476,
-0.058958083391189575,
0.12437312304973602,
-0.02557576633989811,
0.1556762158870697,
0.08825253695249557,
0.011792381294071674,
-0.10271283239126205,
-0.1517172008752823,
-0.03477974236011505,
0.022329291328787804,
-0.09139315038919449,
0.08728893101215363,
0.1347426176071167,
-0.011509649455547333,
0.04300074279308319,
0.008906800299882889,
-0.0395376943051815,
-0.17708246409893036,
-0.1229119822382927,
-0.06867998838424683,
-0.15299323201179504,
0.012428054586052895,
-0.06315319240093231,
0.01787642389535904,
0.04234951362013817,
0.066338911652565,
-0.03069286420941353,
0.11699683219194412,
-0.03180370479822159,
-0.08137953281402588,
0.06664858013391495,
-0.034303344786167145,
0.02629096433520317,
-0.012935973703861237,
-0.008460517041385174,
-0.038132403045892715,
-0.017841307446360588,
0.0013736779801547527,
0.011932797729969025,
-0.11747316271066666,
-0.014412318356335163,
-0.07583178579807281,
-0.04851336404681206,
-0.08428135514259338,
-0.0008740581688471138,
-0.01677345670759678,
0.11235248297452927,
0.04048636183142662,
-0.0890006273984909,
-0.011831426061689854,
0.15025228261947632,
-0.11666398495435715,
-0.20691975951194763,
-0.06340430676937103,
0.274996280670166,
0.0627456083893776,
0.1450004279613495,
-0.04091713950037956,
0.00462886318564415,
-0.09783204644918442,
0.3448697328567505,
0.25440648198127747,
-0.06393852829933167,
0.04810115322470665,
0.051103997975587845,
0.04404282942414284,
0.049606598913669586,
0.023405157029628754,
0.08343277871608734,
0.32936808466911316,
-0.04472985491156578,
-0.03660992160439491,
-0.05594843626022339,
-0.06329580396413803,
-0.038297075778245926,
0.04348982498049736,
-0.020259791985154152,
-0.11740419268608093,
-0.022186610847711563,
0.11966544389724731,
-0.24192485213279724,
0.133467897772789,
-0.04571826010942459,
-0.09056868404150009,
-0.02946968376636505,
-0.015986280515789986,
0.05871385708451271,
0.07608629763126373,
0.07382916659116745,
-0.05554657801985741,
-0.09896405041217804,
0.10340635478496552,
0.05036648362874985,
-0.2329118847846985,
0.018541546538472176,
0.04919476434588432,
-0.03017665445804596,
-0.021637072786688805,
0.010790947824716568,
0.09790199249982834,
0.012425770983099937,
0.13141566514968872,
0.03426911309361458,
0.15303249657154083,
-0.010131238028407097,
-0.08131913840770721,
0.03293213993310928,
0.05652814731001854,
-0.02393581159412861,
-0.035055771470069885,
-0.01140469778329134,
-0.2610277831554413,
0.08833248913288116,
0.00015771633479744196,
-0.04229233041405678,
-0.03033015877008438,
0.006403541192412376,
-0.057553213089704514,
0.047388553619384766,
0.010490331798791885,
-0.0019301060819998384,
-0.0627487376332283,
-0.011176691390573978,
0.08832677453756332,
0.005391156300902367,
-0.11479656398296356,
-0.11939159780740738,
-0.1622319370508194,
-0.09806107729673386,
-0.03324146568775177,
-0.00944250263273716,
-0.0971030741930008,
0.00019520068599376827,
0.010740520432591438,
0.029052171856164932,
-0.0255824513733387,
0.038386229425668716,
0.0961468517780304,
0.013008412905037403,
0.015402606688439846,
0.0020534591749310493,
0.09052255004644394,
0.10167006403207779,
-0.15448948740959167,
-0.1295212209224701
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 18833547
- CO2 Emissions (in grams): 64.58945483765274
## Validation Metrics
- Loss: 0.14247722923755646
- Accuracy: 0.9586074193404036
- Macro F1: 0.9468339778730883
- Micro F1: 0.9586074193404036
- Weighted F1: 0.9585551117678807
- Macro Precision: 0.9445436604001405
- Micro Precision: 0.9586074193404036
- Weighted Precision: 0.9591405429662925
- Macro Recall: 0.9499427161888565
- Micro Recall: 0.9586074193404036
- Weighted Recall: 0.9586074193404036
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/adelgasmi/autonlp-kpmg_nlp-18833547
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer
model = AutoModelForSequenceClassification.from_pretrained("adelgasmi/autonlp-kpmg_nlp-18833547", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("adelgasmi/autonlp-kpmg_nlp-18833547", use_auth_token=True)
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
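The `outputs` object holds raw logits. To turn them into a class label, one can apply a softmax and look the winning index up in the model's `id2label` mapping; the snippet below is a sketch that continues the Python API example above (the actual label names come from the AutoNLP training data and are not listed in this card).
```
import torch

probs = torch.softmax(outputs.logits, dim=-1)
pred_id = int(probs.argmax(dim=-1))
print(model.config.id2label[pred_id], float(probs[0, pred_id]))
```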
|
{"language": "ar", "tags": "autonlp", "datasets": ["adelgasmi/autonlp-data-kpmg_nlp"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}], "co2_eq_emissions": 64.58945483765274}
|
text-classification
|
adelgasmi/autonlp-kpmg_nlp-18833547
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"ar",
"dataset:adelgasmi/autonlp-data-kpmg_nlp",
"co2_eq_emissions",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ar"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #ar #dataset-adelgasmi/autonlp-data-kpmg_nlp #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 18833547
- CO2 Emissions (in grams): 64.58945483765274
## Validation Metrics
- Loss: 0.14247722923755646
- Accuracy: 0.9586074193404036
- Macro F1: 0.9468339778730883
- Micro F1: 0.9586074193404036
- Weighted F1: 0.9585551117678807
- Macro Precision: 0.9445436604001405
- Micro Precision: 0.9586074193404036
- Weighted Precision: 0.9591405429662925
- Macro Recall: 0.9499427161888565
- Micro Recall: 0.9586074193404036
- Weighted Recall: 0.9586074193404036
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 18833547\n- CO2 Emissions (in grams): 64.58945483765274",
"## Validation Metrics\n\n- Loss: 0.14247722923755646\n- Accuracy: 0.9586074193404036\n- Macro F1: 0.9468339778730883\n- Micro F1: 0.9586074193404036\n- Weighted F1: 0.9585551117678807\n- Macro Precision: 0.9445436604001405\n- Micro Precision: 0.9586074193404036\n- Weighted Precision: 0.9591405429662925\n- Macro Recall: 0.9499427161888565\n- Micro Recall: 0.9586074193404036\n- Weighted Recall: 0.9586074193404036",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #ar #dataset-adelgasmi/autonlp-data-kpmg_nlp #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 18833547\n- CO2 Emissions (in grams): 64.58945483765274",
"## Validation Metrics\n\n- Loss: 0.14247722923755646\n- Accuracy: 0.9586074193404036\n- Macro F1: 0.9468339778730883\n- Micro F1: 0.9586074193404036\n- Weighted F1: 0.9585551117678807\n- Macro Precision: 0.9445436604001405\n- Micro Precision: 0.9586074193404036\n- Weighted Precision: 0.9591405429662925\n- Macro Recall: 0.9499427161888565\n- Micro Recall: 0.9586074193404036\n- Weighted Recall: 0.9586074193404036",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
72,
42,
156,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #ar #dataset-adelgasmi/autonlp-data-kpmg_nlp #co2_eq_emissions #autotrain_compatible #endpoints_compatible #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 18833547\n- CO2 Emissions (in grams): 64.58945483765274## Validation Metrics\n\n- Loss: 0.14247722923755646\n- Accuracy: 0.9586074193404036\n- Macro F1: 0.9468339778730883\n- Micro F1: 0.9586074193404036\n- Weighted F1: 0.9585551117678807\n- Macro Precision: 0.9445436604001405\n- Micro Precision: 0.9586074193404036\n- Weighted Precision: 0.9591405429662925\n- Macro Recall: 0.9499427161888565\n- Micro Recall: 0.9586074193404036\n- Weighted Recall: 0.9586074193404036## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.10009733587503433,
0.2081102430820465,
-0.002871075877919793,
0.08175350725650787,
0.10388089716434479,
0.039328038692474365,
0.06559102237224579,
0.12901300191879272,
-0.011995851993560791,
0.1551448106765747,
0.08343926072120667,
0.18307366967201233,
0.06661561131477356,
0.13800545036792755,
-0.07165562361478806,
-0.13520283997058868,
0.0067283036187291145,
0.032058265060186386,
0.04109911620616913,
0.08740759640932083,
0.07501290738582611,
-0.07953409105539322,
0.12698374688625336,
-0.005440513137727976,
-0.10626187920570374,
0.04966912046074867,
0.060737837105989456,
-0.07078977674245834,
0.07319211214780807,
0.106412373483181,
0.12087495625019073,
0.006854766048491001,
0.08470652252435684,
-0.13615384697914124,
-0.017107302322983742,
0.05385309085249901,
-0.043774817138910294,
0.09014967828989029,
0.1615382581949234,
0.008876516483724117,
0.08596107363700867,
-0.10281436890363693,
0.08820730447769165,
0.07709521800279617,
-0.07790594547986984,
-0.06882886588573456,
-0.12075115740299225,
0.06900516897439957,
0.08728457242250443,
0.09032334387302399,
0.014709272421896458,
0.18298834562301636,
-0.019686022773385048,
0.11048351228237152,
0.07535984367132187,
-0.23887944221496582,
-0.05174579471349716,
0.17370474338531494,
-0.02862049825489521,
0.013586917892098427,
-0.0010544807882979512,
0.006259371526539326,
0.041125115007162094,
-0.0012510530650615692,
0.013885130174458027,
-0.055652860552072525,
-0.05053500831127167,
-0.030272187665104866,
-0.12470410764217377,
-0.06748753041028976,
0.13709090650081635,
0.019412687048316002,
-0.05674280971288681,
-0.09557508677244186,
-0.0791485384106636,
-0.09330558776855469,
-0.059155695140361786,
-0.041658952832221985,
0.012925272807478905,
-0.03961336612701416,
-0.059905968606472015,
0.061569683253765106,
-0.03244417533278465,
-0.07361216843128204,
-0.12396318465471268,
-0.0016410578973591328,
0.006365849636495113,
0.04575202241539955,
0.023368222638964653,
0.010703874751925468,
-0.0653611570596695,
-0.05077387019991875,
0.0021059513092041016,
0.021750018000602722,
-0.09487009048461914,
-0.06458211690187454,
0.00467180460691452,
0.07082255184650421,
0.04876229539513588,
0.15545782446861267,
-0.016404900699853897,
0.11051742732524872,
0.05937434360384941,
-0.01741412840783596,
-0.05450785160064697,
0.10327092558145523,
-0.11142436414957047,
-0.14126232266426086,
0.017513109371066093,
-0.03006044588983059,
0.012077120132744312,
-0.029754625633358955,
-0.05197868496179581,
-0.07300732284784317,
0.034260138869285583,
0.032276034355163574,
0.045938629657030106,
0.020092714577913284,
-0.057372260838747025,
-0.07036274671554565,
0.10143326967954636,
-0.0911010131239891,
0.03529244661331177,
0.01879848912358284,
-0.11386967450380325,
0.07212718576192856,
0.06054948642849922,
-0.0035123727284371853,
-0.13112710416316986,
0.017051562666893005,
-0.10675179213285446,
0.008691252209246159,
-0.0783524140715599,
-0.13345268368721008,
0.04689452424645424,
0.026982828974723816,
-0.036123454570770264,
-0.1096547394990921,
-0.151280015707016,
-0.09927501529455185,
0.004394800402224064,
-0.05926547572016716,
-0.06374476850032806,
-0.014345711097121239,
0.01708829030394554,
0.03822658583521843,
-0.0037236963398754597,
0.03944467753171921,
-0.02770301140844822,
0.04181651771068573,
0.04553671553730965,
0.06849744915962219,
-0.04108962416648865,
0.03408107906579971,
-0.028376445174217224,
0.013533752411603928,
-0.12120173126459122,
0.08813370764255524,
-0.08837886154651642,
0.03573022037744522,
-0.18807289004325867,
-0.05923912301659584,
0.07332354784011841,
-0.034960079938173294,
0.0749608501791954,
0.059324316680431366,
-0.12962070107460022,
0.007895266637206078,
0.11497122794389725,
-0.075982004404068,
-0.11762714385986328,
0.06042422354221344,
-0.0004259113920852542,
-0.00006848594057373703,
0.04536982253193855,
0.10716737061738968,
0.19521856307983398,
-0.12414485961198807,
-0.07310161739587784,
0.010328761301934719,
0.02043335698544979,
-0.09003686159849167,
0.0858842059969902,
-0.045726947486400604,
-0.13776010274887085,
-0.01003984548151493,
0.001707453397102654,
-0.014389132149517536,
-0.019133059307932854,
-0.0724601298570633,
-0.016727250069379807,
-0.038183994591236115,
0.00002277344901813194,
0.009158924221992493,
0.01371920108795166,
-0.023527344688773155,
-0.015880268067121506,
0.05419328063726425,
0.17541861534118652,
-0.044691331684589386,
-0.03222106769680977,
-0.12973976135253906,
0.04174895957112312,
-0.10561270266771317,
-0.03832220286130905,
-0.2195889800786972,
-0.0840611606836319,
0.017048044130206108,
-0.12775208055973053,
0.026911191642284393,
-0.041123922914266586,
0.08616359531879425,
0.05273305997252464,
0.05903478339314461,
0.034644559025764465,
0.09297920763492584,
-0.027485093101859093,
-0.08427184820175171,
-0.0439334511756897,
-0.045711662620306015,
0.009650842286646366,
0.21283294260501862,
-0.16453474760055542,
0.017854075878858566,
0.03488937020301819,
0.02938232757151127,
-0.004535661078989506,
-0.06269291788339615,
-0.036053773015737534,
0.08493752032518387,
0.03327809274196625,
-0.045192934572696686,
0.07461246103048325,
-0.036556683480739594,
-0.07257313281297684,
-0.025158442556858063,
-0.24332480132579803,
0.20058579742908478,
0.10975395888090134,
0.004988886881619692,
-0.07269444316625595,
-0.08373355865478516,
0.03513515368103981,
-0.04522478207945824,
-0.018282756209373474,
0.05678475648164749,
0.15039031207561493,
0.03850144147872925,
0.0924319475889206,
-0.06311877816915512,
-0.024849331006407738,
0.001744718523696065,
-0.02417248673737049,
-0.01932290382683277,
0.19159775972366333,
0.07551007717847824,
-0.09535250067710876,
0.05858964845538139,
-0.024003174155950546,
-0.08113333582878113,
0.021682871505618095,
0.0370127409696579,
-0.05724044516682625,
-0.0765109434723854,
0.004906981252133846,
0.05552246421575546,
0.0036598758306354284,
0.0011317274766042829,
0.07097796350717545,
0.06582307815551758,
-0.03807772323489189,
0.039752136915922165,
-0.10346589237451553,
0.03079349547624588,
0.03613824397325516,
-0.05630205571651459,
-0.05381236970424652,
0.015123267658054829,
0.028563320636749268,
0.09945642203092575,
0.020473867654800415,
0.006793960463255644,
-0.03194722533226013,
-0.016103841364383698,
-0.12142015993595123,
0.21167543530464172,
-0.09778644889593124,
-0.13986538350582123,
-0.15538346767425537,
-0.1590873897075653,
-0.07782860100269318,
-0.058526210486888885,
-0.013002179563045502,
-0.048973072320222855,
-0.11029940843582153,
-0.045152466744184494,
-0.10326097160577774,
-0.0004165612917859107,
-0.07718738168478012,
0.03690623119473457,
-0.030227839946746826,
0.08775873482227325,
-0.12951523065567017,
-0.016823504120111465,
0.00351190404035151,
-0.11898603290319443,
0.03326607495546341,
0.009229710325598717,
0.12100239098072052,
0.16290107369422913,
-0.025944774970412254,
0.03143917769193649,
0.004786286968737841,
0.22611744701862335,
-0.00957416370511055,
-0.04007121920585632,
0.2017553299665451,
0.0969925969839096,
0.07932543009519577,
0.10295663774013519,
0.07122756540775299,
-0.06760527193546295,
-0.014250065200030804,
0.05376187339425087,
-0.01426784135401249,
-0.19369260966777802,
-0.19908010959625244,
0.009691741317510605,
0.001401571906171739,
0.17393891513347626,
0.034771621227264404,
0.015312018804252148,
0.10691148787736893,
0.033025749027729034,
0.0891219973564148,
-0.03528791293501854,
0.06469863653182983,
0.14410017430782318,
0.03718815743923187,
0.12127753347158432,
-0.08333466947078705,
0.012324849143624306,
0.10745987296104431,
0.005072402767837048,
0.07054167240858078,
0.03371863067150116,
0.15070758759975433,
-0.014584721997380257,
0.139623761177063,
0.02299703285098076,
0.10413804650306702,
0.011147589422762394,
-0.004298607353121042,
0.052439961582422256,
-0.09010303020477295,
-0.11810250580310822,
-0.02715909853577614,
0.034304965287446976,
0.019162999466061592,
-0.0805976390838623,
0.002749545266851783,
-0.005278993397951126,
0.0648168995976448,
0.031106311827898026,
-0.4464006721973419,
-0.04565681517124176,
0.003499626647680998,
-0.011960584670305252,
-0.09024524688720703,
-0.02469666860997677,
-0.007662519346922636,
-0.13474346697330475,
0.061524420976638794,
-0.016730640083551407,
0.13456369936466217,
-0.07144961506128311,
-0.043709415942430496,
-0.01682448759675026,
0.06424630433320999,
-0.0008592019439674914,
0.05853188782930374,
-0.14170973002910614,
0.14771388471126556,
0.04245409369468689,
0.039921365678310394,
-0.06411455571651459,
0.007790099363774061,
0.03236822411417961,
-0.0007234189542941749,
0.12536917626857758,
0.013275323435664177,
-0.12188233435153961,
-0.30518078804016113,
-0.15154315531253815,
0.025321032851934433,
-0.016306916251778603,
-0.00630760844796896,
0.07896116375923157,
-0.019922764971852303,
-0.013131815008819103,
-0.04524106904864311,
-0.07376653701066971,
-0.08861468732357025,
-0.0836753100156784,
0.03507357835769653,
0.08565351366996765,
-0.02972586825489998,
-0.03621191158890724,
-0.027945013716816902,
-0.01263181772083044,
0.1313500851392746,
-0.09758264571428299,
-0.05010557174682617,
-0.1381980925798416,
-0.03331800177693367,
0.12488318979740143,
-0.12910641729831696,
0.05326298251748085,
-0.0018550704699009657,
0.0724659115076065,
-0.01454964466392994,
-0.11288279294967651,
0.05362424626946449,
-0.041979383677244186,
-0.02313975803554058,
0.033177752047777176,
0.03552442044019699,
0.0027044215239584446,
0.0628187358379364,
0.05515271797776222,
0.01839021034538746,
-0.03375938907265663,
-0.1401083618402481,
-0.06231983006000519,
0.05236139893531799,
0.11648451536893845,
0.07209686189889908,
0.020100755617022514,
-0.1459112912416458,
-0.057758402079343796,
0.08575104922056198,
0.13983984291553497,
0.22540798783302307,
-0.06758210062980652,
0.003165180329233408,
0.11966797709465027,
-0.036956027150154114,
-0.19983966648578644,
-0.038429778069257736,
0.03711380809545517,
-0.005251413211226463,
-0.05795501545071602,
-0.08834569156169891,
0.1275562196969986,
0.1872640997171402,
-0.03915448486804962,
-0.03473389521241188,
-0.30618757009506226,
-0.13731759786605835,
0.16043820977210999,
0.13950178027153015,
-0.008675020188093185,
-0.1580897718667984,
-0.07437289506196976,
-0.1268950253725052,
-0.14223188161849976,
0.1293138861656189,
-0.045562658458948135,
0.06148694455623627,
-0.05332440137863159,
0.0884583368897438,
0.03807327523827553,
-0.06500090658664703,
0.14674022793769836,
0.017757851630449295,
0.022240376099944115,
-0.05345155671238899,
-0.032478515058755875,
-0.028766950592398643,
-0.09266746044158936,
0.1266857385635376,
0.036874186247587204,
0.07493323087692261,
-0.2433544099330902,
0.00481824716553092,
-0.004399924073368311,
0.05274417996406555,
-0.04838477075099945,
-0.009608456864953041,
-0.02963397465646267,
0.014720901846885681,
-0.005487032700330019,
-0.021952200680971146,
-0.05095580592751503,
-0.03444702550768852,
0.07125910371541977,
0.1845158487558365,
0.09563291072845459,
0.018877962604165077,
-0.10469071567058563,
0.04920719936490059,
-0.04261035472154617,
0.048793643712997437,
-0.11345215886831284,
0.05358315631747246,
0.12647640705108643,
0.03547053784132004,
0.06258207559585571,
0.014765619300305843,
-0.038644444197416306,
-0.018406659364700317,
0.05196044221520424,
-0.1247984990477562,
0.03336792439222336,
0.014631476253271103,
0.030335796996951103,
-0.09372296184301376,
-0.0545637384057045,
0.10632703453302383,
0.020379886031150818,
-0.03739970549941063,
0.021588897332549095,
-0.009791427291929722,
-0.01565868966281414,
0.25949540734291077,
0.013994883745908737,
0.09199342876672745,
-0.11745359003543854,
0.055886246263980865,
0.10778219997882843,
-0.17600588500499725,
-0.005567039828747511,
0.12736427783966064,
-0.07938804477453232,
-0.07103900611400604,
0.020390307530760765,
0.12457691878080368,
-0.1304272562265396,
-0.04938891902565956,
0.018425730988383293,
-0.06137390807271004,
0.07331234961748123,
0.21395203471183777,
0.09016844630241394,
0.0018986103823408484,
0.012952389195561409,
-0.08569090813398361,
-0.11496340483427048,
0.028985241428017616,
0.0828055888414383,
0.020145300775766373,
-0.08297155052423477,
0.16031622886657715,
-0.004185854457318783,
-0.0005173642421141267,
-0.0019427896477282047,
-0.00004827182783628814,
-0.18451891839504242,
-0.03883877769112587,
-0.028079276904463768,
0.08001044392585754,
-0.07124308496713638,
0.06530745327472687,
-0.021513616666197777,
0.03684965893626213,
-0.053468022495508194,
0.00019013170094694942,
-0.0680786594748497,
-0.0689624473452568,
0.007415165193378925,
0.06709601730108261,
-0.11538518965244293,
-0.005476544611155987,
0.07499156892299652,
-0.018034599721431732,
0.03332873806357384,
0.07835868746042252,
0.07067187130451202,
0.010089715011417866,
-0.053872715681791306,
-0.017722390592098236,
0.04988877475261688,
0.03767745941877365,
0.08201420307159424,
-0.16937220096588135,
0.07283198088407516,
-0.005927703808993101,
0.027527034282684326,
0.04476998746395111,
0.11325515806674957,
-0.11531110852956772,
0.01715431921184063,
-0.10909527540206909,
-0.06929618120193481,
-0.12147527188062668,
0.03907395526766777,
0.1417788714170456,
0.06141708046197891,
0.05904311686754227,
-0.08053971081972122,
0.03965136781334877,
-0.19163167476654053,
-0.016031483188271523,
-0.025598060339689255,
-0.029808174818754196,
0.009365062229335308,
-0.02487417869269848,
0.0879957526922226,
-0.014949733391404152,
0.09114202857017517,
-0.025974644348025322,
0.020652305334806442,
0.010958973318338394,
0.08228565007448196,
-0.03294511139392853,
-0.05537909269332886,
0.1925521045923233,
0.08969160914421082,
0.010914595797657967,
0.12180239707231522,
0.11390858888626099,
0.00448570353910327,
0.009461835958063602,
-0.0003835865936707705,
0.09233739227056503,
-0.0863400250673294,
0.05737381428480148,
0.04073067381978035,
-0.09780556708574295,
-0.021459557116031647,
0.08346143364906311,
-0.12066927552223206,
0.010411715134978294,
-0.06527066975831985,
0.05024546757340431,
0.11914215981960297,
-0.14231707155704498,
0.014728025533258915,
0.022025082260370255,
-0.06942178308963776,
-0.19774585962295532,
-0.08821194618940353,
-0.12517274916172028,
-0.026154400780797005,
-0.03566568344831467,
-0.11590564250946045,
0.022131865844130516,
0.1468367576599121,
0.009126530960202217,
0.0463106706738472,
0.10624431818723679,
-0.215407133102417,
-0.012811452150344849,
-0.0530453696846962,
0.0030009066686034203,
-0.0011138819390907884,
-0.014337562024593353,
-0.009642235934734344,
0.033492304384708405,
-0.017890149727463722,
0.11118924617767334,
0.01704937033355236,
0.023104174062609673,
0.09680493175983429,
-0.022212576121091843,
-0.07241282612085342,
-0.04040642827749252,
0.02432536706328392,
0.04145480692386627,
0.15236812829971313,
0.0358695387840271,
-0.0008323166985064745,
-0.03855651989579201,
0.17962084710597992,
-0.08757758140563965,
0.0084699597209692,
-0.1069687232375145,
0.23744292557239532,
0.016710884869098663,
0.03349440172314644,
0.030281392857432365,
0.0072704036720097065,
0.02994418516755104,
0.18127314746379852,
0.11536797881126404,
-0.0019892326090484858,
-0.01781652495265007,
0.0165175199508667,
-0.00328273163177073,
-0.024230901151895523,
0.10602299869060516,
0.05264892801642418,
0.18269702792167664,
-0.06546054035425186,
0.02002105861902237,
0.007178594823926687,
0.007044844329357147,
-0.09593693166971207,
0.025025514885783195,
0.010540070943534374,
-0.007282061967998743,
0.01734255813062191,
0.12552441656589508,
-0.03331497684121132,
0.06428616493940353,
0.09524621069431305,
-0.08996749669313431,
-0.15156085789203644,
0.026800092309713364,
-0.03952373191714287,
-0.04229845851659775,
0.07969530671834946,
-0.03437256067991257,
-0.014648672193288803,
0.0432434156537056,
0.0007888586260378361,
-0.2138630598783493,
-0.08356032520532608,
-0.004766439087688923,
0.13332951068878174,
0.28679028153419495,
0.02695111371576786,
0.10982495546340942,
0.1746840626001358,
-0.01108982041478157,
-0.15099669992923737,
0.11211390048265457,
0.003183668712154031,
-0.15931646525859833,
0.10244356840848923,
0.050782836973667145,
-0.050377827137708664,
0.1461600810289383,
0.04824719578027725,
-0.16317519545555115,
0.0009443970629945397,
-0.006602110806852579,
0.10034378618001938,
-0.06670468300580978,
0.017807206138968468,
-0.08290740102529526,
0.12020863592624664,
0.1613686978816986,
-0.016302281990647316,
0.018211087211966515,
-0.04928387701511383,
0.05181533098220825,
-0.016468500718474388,
0.015628043562173843,
-0.04793738201260567,
-0.0992782711982727,
0.0525871217250824,
-0.23716118931770325,
0.0036650069523602724,
-0.2672797441482544,
-0.022490819916129112,
-0.018930893391370773,
-0.05694117397069931,
-0.054796185344457626,
0.10516808182001114,
0.011648643761873245,
-0.0013563535176217556,
-0.03090762533247471,
-0.18313108384609222,
0.0030955811962485313,
0.1438250094652176,
-0.140880286693573,
-0.12960413098335266
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Kazakh
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) for Kazakh ASR using the [Kazakh Speech Corpus v1.1](https://issai.nu.edu.kz/kz-speech-corpus/?version=1.1).
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
from utils import get_test_dataset
test_dataset = get_test_dataset("ISSAI_KSC_335RS_v1.1")
processor = Wav2Vec2Processor.from_pretrained("wav2vec2-large-xlsr-kazakh")
model = Wav2Vec2ForCTC.from_pretrained("wav2vec2-large-xlsr-kazakh")
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
## Evaluation
The model can be evaluated as follows on the test set of [Kazakh Speech Corpus v1.1](https://issai.nu.edu.kz/kz-speech-corpus/?version=1.1). To evaluate, download the [archive](https://www.openslr.org/resources/102/ISSAI_KSC_335RS_v1.1_flac.tar.gz), untar it, and pass the data path to `get_test_dataset` as below:
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
from utils import get_test_dataset
test_dataset = get_test_dataset("ISSAI_KSC_335RS_v1.1")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("adilism/wav2vec2-large-xlsr-kazakh")
model = Wav2Vec2ForCTC.from_pretrained("adilism/wav2vec2-large-xlsr-kazakh")
model.to("cuda")
# NOTE: `chars_to_ignore_regex` was not defined in the original snippet; this punctuation
# list is an assumption added so the example runs as written
chars_to_ignore = [",", "?", ".", "!", "-", ";", ":", "—", "–", "”"]
chars_to_ignore_regex = f'[{"".join(chars_to_ignore)}]'

# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
def evaluate(batch):
    # the processor expects the raw audio arrays prepared above, not text
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
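For the download and extraction step mentioned above, a minimal Python sketch (the archive URL comes from the link above; the local file name and extraction directory are assumptions):
```python
import tarfile
import urllib.request

# Download the corpus archive (it is large, so this can take a while)
url = "https://www.openslr.org/resources/102/ISSAI_KSC_335RS_v1.1_flac.tar.gz"
archive_path = "ISSAI_KSC_335RS_v1.1_flac.tar.gz"  # assumed local filename
urllib.request.urlretrieve(url, archive_path)

# Extract; the extracted data path is then passed to `get_test_dataset` as shown above
with tarfile.open(archive_path, "r:gz") as tar:
    tar.extractall(".")
```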
**Test Result**: 19.65%
## Training
The Kazakh Speech Corpus v1.1 `train` dataset was used for training.
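The exact training script is not included in this card. As a rough illustration only, a typical XLSR-53 fine-tuning setup with the `transformers` Trainer might look like the sketch below; the data collator, hyperparameters, output paths, and the prepared `train_dataset`/`eval_dataset` objects are assumptions for illustration, not the authors' actual configuration.
```python
import torch
from dataclasses import dataclass
from typing import Dict, List, Union
from transformers import Trainer, TrainingArguments, Wav2Vec2ForCTC, Wav2Vec2Processor

# Processor with the Kazakh vocabulary (loaded from the fine-tuned checkpoint for brevity;
# training from scratch would build the tokenizer vocabulary from the corpus transcripts)
processor = Wav2Vec2Processor.from_pretrained("wav2vec2-large-xlsr-kazakh")

@dataclass
class DataCollatorCTCWithPadding:
    """Dynamically pads audio inputs and label sequences to the longest item in the batch."""
    processor: Wav2Vec2Processor

    def __call__(self, features: List[Dict[str, Union[List[int], torch.Tensor]]]) -> Dict[str, torch.Tensor]:
        input_features = [{"input_values": f["input_values"]} for f in features]
        label_features = [{"input_ids": f["labels"]} for f in features]
        batch = self.processor.pad(input_features, padding=True, return_tensors="pt")
        with self.processor.as_target_processor():
            labels_batch = self.processor.pad(label_features, padding=True, return_tensors="pt")
        # Replace label padding with -100 so it is ignored by the CTC loss
        batch["labels"] = labels_batch["input_ids"].masked_fill(labels_batch.attention_mask.ne(1), -100)
        return batch

model = Wav2Vec2ForCTC.from_pretrained(
    "facebook/wav2vec2-large-xlsr-53",
    ctc_loss_reduction="mean",
    pad_token_id=processor.tokenizer.pad_token_id,
    vocab_size=len(processor.tokenizer),
)
model.freeze_feature_extractor()  # keep the convolutional feature encoder frozen

training_args = TrainingArguments(
    output_dir="./wav2vec2-large-xlsr-kazakh",  # assumed output path
    per_device_train_batch_size=16,             # assumed hyperparameters
    gradient_accumulation_steps=2,
    num_train_epochs=30,
    learning_rate=3e-4,
    warmup_steps=500,
    evaluation_strategy="steps",
    eval_steps=400,
    save_steps=400,
    logging_steps=400,
    group_by_length=True,
    fp16=True,
)

# `train_dataset` / `eval_dataset` are assumed to be Dataset objects already mapped to
# "input_values" and "labels" columns with the processor (preparation not shown here)
trainer = Trainer(
    model=model,
    args=training_args,
    data_collator=DataCollatorCTCWithPadding(processor=processor),
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    tokenizer=processor.feature_extractor,
)
trainer.train()
```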
|
{"language": "kk", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["kazakh_speech_corpus"], "metrics": ["wer"], "base_model": "facebook/wav2vec2-large-xlsr-53", "model-index": [{"name": "Wav2Vec2-XLSR-53 Kazakh by adilism", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Kazakh Speech Corpus v1.1", "type": "kazakh_speech_corpus", "args": "kk"}, "metrics": [{"type": "wer", "value": 19.65, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
aismlv/wav2vec2-large-xlsr-kazakh
|
[
"transformers",
"pytorch",
"jax",
"safetensors",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"kk",
"dataset:kazakh_speech_corpus",
"base_model:facebook/wav2vec2-large-xlsr-53",
"license:apache-2.0",
"model-index",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"kk"
] |
TAGS
#transformers #pytorch #jax #safetensors #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #kk #dataset-kazakh_speech_corpus #base_model-facebook/wav2vec2-large-xlsr-53 #license-apache-2.0 #model-index #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Kazakh
Fine-tuned facebook/wav2vec2-large-xlsr-53 for Kazakh ASR using the Kazakh Speech Corpus v1.1
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the test set of Kazakh Speech Corpus v1.1. To evaluate, download the archive, untar and pass the path to data to 'get_test_dataset' as below:
Test Result: 19.65%
## Training
The Kazakh Speech Corpus v1.1 'train' dataset was used for training.
|
[
"# Wav2Vec2-Large-XLSR-53-Kazakh\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 for Kazakh ASR using the Kazakh Speech Corpus v1.1\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the test set of Kazakh Speech Corpus v1.1. To evaluate, download the archive, untar and pass the path to data to 'get_test_dataset' as below:\n\n\n\nTest Result: 19.65%",
"## Training\n\nThe Kazakh Speech Corpus v1.1 'train' dataset was used for training."
] |
[
"TAGS\n#transformers #pytorch #jax #safetensors #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #kk #dataset-kazakh_speech_corpus #base_model-facebook/wav2vec2-large-xlsr-53 #license-apache-2.0 #model-index #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Kazakh\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 for Kazakh ASR using the Kazakh Speech Corpus v1.1\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the test set of Kazakh Speech Corpus v1.1. To evaluate, download the archive, untar and pass the path to data to 'get_test_dataset' as below:\n\n\n\nTest Result: 19.65%",
"## Training\n\nThe Kazakh Speech Corpus v1.1 'train' dataset was used for training."
] |
[
108,
69,
20,
59,
20
] |
[
"passage: TAGS\n#transformers #pytorch #jax #safetensors #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #kk #dataset-kazakh_speech_corpus #base_model-facebook/wav2vec2-large-xlsr-53 #license-apache-2.0 #model-index #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Kazakh\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 for Kazakh ASR using the Kazakh Speech Corpus v1.1\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the test set of Kazakh Speech Corpus v1.1. To evaluate, download the archive, untar and pass the path to data to 'get_test_dataset' as below:\n\n\n\nTest Result: 19.65%## Training\n\nThe Kazakh Speech Corpus v1.1 'train' dataset was used for training."
] |
[
-0.07278113812208176,
0.03558628633618355,
-0.004104719962924719,
-0.046547602862119675,
0.09576592594385147,
-0.011661832220852375,
0.18318139016628265,
0.14815112948417664,
-0.0009429799974896014,
-0.02321859449148178,
0.03681764379143715,
0.032019540667533875,
0.09247159212827682,
0.0895061120390892,
0.026346061378717422,
-0.20961616933345795,
-0.000568796822335571,
-0.08124236762523651,
0.007749754469841719,
0.11163699626922607,
0.17425766587257385,
-0.10429180413484573,
-0.025877635926008224,
0.011117859743535519,
-0.045243799686431885,
0.040643416345119476,
0.0220593623816967,
-0.18106763064861298,
0.03681149706244469,
0.027287201955914497,
0.06662115454673767,
0.060666605830192566,
0.04137963056564331,
-0.2282353639602661,
0.025550302118062973,
0.015526092611253262,
0.02170904166996479,
0.00700171897187829,
0.121273472905159,
-0.019850093871355057,
0.05136355012655258,
-0.052551910281181335,
-0.07418899983167648,
0.060508135706186295,
-0.03517177328467369,
-0.18478849530220032,
-0.07733608037233353,
0.097600556910038,
0.09362183511257172,
0.10679278522729874,
-0.06360381096601486,
0.05495323985815048,
-0.08077261596918106,
0.07688431441783905,
0.029278643429279327,
-0.2303742915391922,
0.00527908792719245,
-0.03352189436554909,
-0.06324069201946259,
0.1735112965106964,
-0.07874428480863571,
0.008422988466918468,
-0.03127710893750191,
-0.048422690480947495,
-0.011957593262195587,
-0.08796896785497665,
-0.2628474831581116,
-0.03942166641354561,
-0.1122531145811081,
-0.07163140177726746,
0.2962208390235901,
-0.011500811204314232,
-0.0478932186961174,
-0.10300525277853012,
0.05238371342420578,
0.018488861620426178,
-0.020980168133974075,
-0.01465462427586317,
-0.033382873982191086,
-0.022879645228385925,
0.02908063679933548,
-0.06947772204875946,
-0.13585889339447021,
-0.06835208833217621,
0.00160474784206599,
0.10661856830120087,
0.04550052806735039,
0.03249702975153923,
-0.06563586741685867,
0.04126928746700287,
-0.10362633317708969,
-0.08939575403928757,
0.018428469076752663,
-0.02891983650624752,
-0.04753213748335838,
0.008059806190431118,
-0.02264026179909706,
-0.2729622721672058,
0.09293997287750244,
-0.09402178972959518,
-0.012081919237971306,
0.07668646425008774,
-0.03706227242946625,
0.046485356986522675,
0.001197452307678759,
0.15979520976543427,
0.01711295172572136,
-0.03242873027920723,
-0.0000868650822667405,
0.04544703662395477,
-0.06287498772144318,
0.016085060313344002,
-0.041186459362506866,
-0.05875483527779579,
-0.0061216033063828945,
0.11098156869411469,
0.015350169502198696,
0.013124478049576283,
0.06863806396722794,
-0.028110826388001442,
0.09407109022140503,
-0.1407565474510193,
-0.015785817056894302,
0.06118891015648842,
-0.08216686546802521,
0.05143136903643608,
-0.08928009122610092,
-0.0073550608940422535,
-0.09709715098142624,
-0.015365890227258205,
0.06027833744883537,
0.06589864939451218,
-0.039160728454589844,
-0.08672917634248734,
-0.0006251400336623192,
0.027233775705099106,
-0.03371128439903259,
-0.15310269594192505,
-0.12364596873521805,
-0.04999352991580963,
-0.019844483584165573,
0.022352995350956917,
0.10888857394456863,
-0.03279667720198631,
-0.06075941398739815,
0.03727593272924423,
-0.08302455395460129,
0.06294353306293488,
-0.04695238918066025,
0.04945480078458786,
-0.018329091370105743,
0.06007813289761543,
0.14289726316928864,
0.07472536712884903,
-0.10715218633413315,
-0.05713328719139099,
-0.1384839564561844,
0.16197827458381653,
-0.09366128593683243,
-0.0382053405046463,
-0.15948806703090668,
-0.02248845249414444,
-0.11422070115804672,
-0.021777629852294922,
0.021903879940509796,
0.08346546441316605,
-0.1329418271780014,
-0.018661368638277054,
0.17773641645908356,
-0.06980934739112854,
0.0051214867271482944,
0.1546425223350525,
-0.0036949017085134983,
0.09402161836624146,
0.12074261903762817,
0.24891769886016846,
0.2538793683052063,
-0.09724097698926926,
-0.1600218117237091,
0.010375726968050003,
-0.04014807567000389,
0.040528569370508194,
0.08820395171642303,
-0.06417648494243622,
0.03998071327805519,
0.008932851254940033,
-0.09781134873628616,
0.02733936533331871,
0.004670934285968542,
-0.0863531306385994,
-0.018252378329634666,
-0.05194474011659622,
0.08775756508111954,
-0.012217038311064243,
0.0348183773458004,
-0.04563825577497482,
-0.07063527405261993,
-0.06862848252058029,
0.11959000676870346,
-0.08505000174045563,
0.048129480332136154,
-0.13136516511440277,
0.027343004941940308,
-0.10733448714017868,
0.051169220358133316,
-0.12853743135929108,
0.11819110065698624,
0.06487364321947098,
-0.0627412423491478,
0.04427154362201691,
-0.015268825925886631,
0.014293643645942211,
-0.035718753933906555,
-0.0878242552280426,
-0.027359195053577423,
-0.0017629093490540981,
-0.014697031117975712,
-0.05577890947461128,
-0.14476531744003296,
0.05022016167640686,
-0.038407497107982635,
0.06798762083053589,
-0.06181199476122856,
0.005896151065826416,
0.018114831298589706,
-0.045666441321372986,
-0.04375893250107765,
0.041285544633865356,
0.03813556581735611,
0.055927641689777374,
-0.0005084251170046628,
0.023322604596614838,
0.014069131575524807,
-0.031900059431791306,
-0.04020342603325844,
0.15532942116260529,
-0.10961122810840607,
-0.19978474080562592,
0.11288270354270935,
-0.024807589128613472,
-0.05074502155184746,
0.03018820658326149,
-0.011718463152647018,
-0.012700241059064865,
-0.014587190002202988,
-0.0322897806763649,
0.21624402701854706,
-0.01742098480463028,
0.10448741912841797,
-0.06819264590740204,
-0.004135167691856623,
0.03312264010310173,
-0.09208567440509796,
0.0015015824465081096,
0.06078639253973961,
-0.12684489786624908,
-0.07625535875558853,
0.0691734030842781,
0.06613979488611221,
-0.03292107209563255,
0.23866845667362213,
-0.004221345763653517,
-0.13983966410160065,
-0.0029747080989181995,
0.0462002232670784,
-0.028409911319613457,
0.12186794728040695,
-0.09076373279094696,
-0.03801802545785904,
0.050617169588804245,
0.06824954599142075,
0.04708024486899376,
-0.108575239777565,
0.05863123759627342,
-0.04786526411771774,
-0.14539164304733276,
-0.09746642410755157,
0.03602593019604683,
0.001506308326497674,
0.07591156661510468,
-0.08198409527540207,
0.04263698309659958,
-0.003150619799271226,
-0.06115143746137619,
-0.13586032390594482,
0.08911039680242538,
-0.13684357702732086,
-0.29684150218963623,
-0.11773498356342316,
0.08858949691057205,
0.001296154223382473,
-0.0016343703027814627,
0.0663129985332489,
-0.1702113002538681,
0.003996438812464476,
-0.06501056998968124,
0.06645643711090088,
-0.003744899993762374,
0.0012355941580608487,
-0.07095090299844742,
0.05860909819602966,
0.022034304216504097,
-0.06492544710636139,
0.000695826718583703,
-0.008803864941000938,
-0.026690419763326645,
0.08055844157934189,
0.04790135845541954,
0.010675379075109959,
0.12940426170825958,
-0.09100455045700073,
-0.01427872572094202,
-0.0522649772465229,
0.05081932991743088,
-0.15613457560539246,
0.0697253867983818,
0.13680817186832428,
-0.0037392466329038143,
-0.00816913228482008,
0.09745416045188904,
-0.02587880939245224,
0.030690239742398262,
0.04370896890759468,
-0.04205362871289253,
-0.031132493168115616,
-0.2824973165988922,
-0.04964263364672661,
-0.05576641857624054,
-0.025248248130083084,
-0.028274333104491234,
0.01823664829134941,
0.07684782892465591,
0.06900064647197723,
-0.05112455412745476,
-0.10738734900951385,
0.09290632605552673,
0.02482517808675766,
0.1776551902294159,
0.0026968528982251883,
0.0781090259552002,
-0.10436893254518509,
0.013651013374328613,
0.03898780792951584,
0.0732874646782875,
0.20403438806533813,
0.043278466910123825,
0.12936152517795563,
0.06355538964271545,
0.1388450562953949,
0.049079544842243195,
-0.019218837842345238,
0.013169566169381142,
0.009477057494223118,
0.047312792390584946,
-0.09461280703544617,
-0.027404310181736946,
0.05283953621983528,
0.13862405717372894,
-0.06125946715474129,
-0.00784279964864254,
-0.04374191537499428,
0.13541048765182495,
0.15760912001132965,
0.040669996291399,
-0.11237281560897827,
-0.11706265062093735,
0.00464205164462328,
-0.07100150734186172,
0.0002516255481168628,
-0.009796434082090855,
0.13251616060733795,
-0.12473670393228531,
0.04101871699094772,
-0.007903511635959148,
0.07763709872961044,
0.009009819477796555,
-0.005457633174955845,
-0.06564467400312424,
0.057830311357975006,
-0.025457317009568214,
0.11402440071105957,
-0.3024109899997711,
0.15046729147434235,
0.054131779819726944,
0.1437874287366867,
-0.058227382600307465,
-0.009391478262841702,
0.08405546844005585,
-0.03179352357983589,
0.0815875455737114,
0.026206625625491142,
-0.1676737368106842,
-0.11931173503398895,
-0.15482942759990692,
0.0443478599190712,
0.10143934935331345,
0.10983968526124954,
0.08634337782859802,
-0.013885264284908772,
-0.001863887533545494,
-0.010005608201026917,
-0.09167337417602539,
-0.22992342710494995,
-0.04646000266075134,
0.029100829735398293,
0.1563444584608078,
0.05128193274140358,
0.003554877359420061,
-0.004841879475861788,
-0.08158321678638458,
-0.0707254409790039,
-0.17743638157844543,
-0.07870284467935562,
-0.02309885434806347,
-0.056847184896469116,
0.13432003557682037,
-0.07685735821723938,
-0.0498521625995636,
0.01435949094593525,
0.04279863089323044,
0.017760077491402626,
-0.04846610873937607,
-0.05993520841002464,
-0.11140888929367065,
-0.16297173500061035,
-0.008582831360399723,
0.13832126557826996,
0.08895907551050186,
0.09669308364391327,
0.046662576496601105,
0.02155516855418682,
0.01805299147963524,
-0.07064632326364517,
0.03693583980202675,
0.026627089828252792,
-0.0592968612909317,
0.07257702201604843,
-0.062091246247291565,
-0.18798142671585083,
-0.22932091355323792,
-0.08325035125017166,
0.1489199548959732,
0.10118028521537781,
-0.09994441270828247,
0.17064064741134644,
0.19042934477329254,
-0.14551116526126862,
-0.1795451045036316,
0.05364994332194328,
0.09151037037372589,
0.047517187893390656,
-0.028781365603208542,
-0.2014061063528061,
-0.0054497565142810345,
0.013148386031389236,
-0.03390505537390709,
0.07133070379495621,
-0.3266109228134155,
-0.12553580105304718,
0.192496195435524,
-0.05024039372801781,
-0.0037672461476176977,
-0.11929692327976227,
-0.023928025737404823,
-0.0667593851685524,
0.14812590181827545,
0.04567255079746246,
-0.020035460591316223,
0.12666593492031097,
0.017515631392598152,
0.03786349296569824,
0.04201902821660042,
-0.04294465854763985,
0.1407082974910736,
0.07524606585502625,
-0.0169603880494833,
-0.02313881181180477,
0.10662122815847397,
0.22497931122779846,
-0.011419491842389107,
0.16473697125911713,
0.0021465530153363943,
0.031094271689653397,
-0.13060829043388367,
-0.02384471334517002,
-0.11626777797937393,
0.08120761811733246,
0.017236683517694473,
-0.033648133277893066,
-0.04830395430326462,
0.01756211929023266,
0.11487094312906265,
-0.012354559265077114,
0.019492924213409424,
-0.08482479304075241,
-0.054653946310281754,
0.1588987559080124,
0.09831959754228592,
0.16962474584579468,
-0.06817591190338135,
0.014633609913289547,
-0.01939653418958187,
0.06875066459178925,
-0.10511855036020279,
0.028919007629156113,
0.05905311182141304,
-0.023032864555716515,
0.11343200504779816,
-0.04714041203260422,
-0.10216601938009262,
0.08149462938308716,
0.0588606558740139,
-0.0549936480820179,
-0.05716719478368759,
0.010172015056014061,
0.00004737505514640361,
-0.026730842888355255,
-0.04486725106835365,
0.1335514932870865,
-0.11919667571783066,
0.018731066957116127,
-0.04349897429347038,
0.02367178350687027,
-0.0671464055776596,
0.2304658442735672,
0.07811623811721802,
0.03453788533806801,
-0.02051883190870285,
0.13078060746192932,
0.0491737462580204,
-0.06959044188261032,
0.057708632200956345,
-0.016603566706180573,
-0.06437158584594727,
-0.04826076328754425,
-0.08685079216957092,
-0.07468368858098984,
0.043986085802316666,
-0.15397778153419495,
-0.07481852173805237,
-0.039601508527994156,
0.0024570408277213573,
0.047651518136262894,
0.004594048950821161,
-0.003959354013204575,
-0.003949521575123072,
0.0008383292006328702,
-0.1260717213153839,
0.12554031610488892,
0.011697961948812008,
-0.04092497378587723,
-0.07676766812801361,
0.13530388474464417,
0.04158520698547363,
0.048348795622587204,
-0.0398184172809124,
-0.08246473222970963,
0.04763318970799446,
0.05796704813838005,
-0.09801732003688812,
0.03492144122719765,
-0.097413070499897,
-0.038918398320674896,
-0.017058053985238075,
-0.03695125877857208,
-0.024127628654241562,
0.060278188437223434,
-0.08696923404932022,
0.017520997673273087,
-0.0415826179087162,
0.13254879415035248,
-0.12849287688732147,
-0.02069767750799656,
-0.01639525219798088,
-0.03221799060702324,
0.05994103103876114,
0.176518976688385,
-0.09775804728269577,
0.12768901884555817,
-0.1419738084077835,
0.01558893546462059,
0.022984636947512627,
0.08528709411621094,
-0.03995704650878906,
-0.14432324469089508,
0.039227958768606186,
0.07265356183052063,
0.009594696573913097,
0.026570437476038933,
0.1687287837266922,
-0.08119194209575653,
-0.006121405865997076,
-0.11390459537506104,
0.03249525651335716,
-0.02765260636806488,
0.09638711810112,
0.03201870247721672,
0.08479788154363632,
0.1974547654390335,
-0.1358722448348999,
0.06545574218034744,
-0.09016213566064835,
-0.004153023473918438,
-0.04068952053785324,
-0.057729244232177734,
-0.045512910932302475,
-0.017803194001317024,
0.09058985114097595,
-0.047713782638311386,
0.14373843371868134,
0.06833171099424362,
0.040960751473903656,
0.04417562112212181,
-0.13782119750976562,
-0.03148293122649193,
0.03918800875544548,
0.17330390214920044,
0.05230751633644104,
-0.0467827133834362,
-0.04010910913348198,
0.0037850351072847843,
0.0010952280135825276,
-0.039781130850315094,
0.03390247002243996,
0.21736733615398407,
0.10613128542900085,
0.08571787923574448,
0.03020733781158924,
-0.10719185322523117,
-0.006767512299120426,
0.07527752965688705,
-0.1661147177219391,
0.06363561749458313,
-0.06859412044286728,
0.171843484044075,
0.18791911005973816,
-0.16699855029582977,
0.11084560304880142,
-0.03344263136386871,
-0.12168099731206894,
-0.07173877209424973,
-0.13213828206062317,
-0.0652572363615036,
-0.09711448103189468,
0.009103319607675076,
-0.09092707931995392,
0.059088051319122314,
0.06474261730909348,
0.035404425114393234,
-0.04072067141532898,
0.18219415843486786,
-0.01266483310610056,
-0.10433783382177353,
0.14909888803958893,
-0.0563197135925293,
0.06513817608356476,
-0.046484146267175674,
0.0015311231836676598,
0.15426424145698547,
-0.024638034403324127,
0.04104238003492355,
0.06631758064031601,
-0.019962437450885773,
0.018010614439845085,
-0.052953820675611496,
-0.05437559261918068,
0.010695160366594791,
0.033440057188272476,
0.14989981055259705,
0.0822417214512825,
0.11921850591897964,
-0.07769838720560074,
-0.004272501450031996,
0.15872108936309814,
0.0057075414806604385,
-0.09456650912761688,
-0.0669943243265152,
0.0507839173078537,
0.07544933259487152,
0.0008214472327381372,
0.04179149493575096,
-0.0663747787475586,
-0.024881083518266678,
0.11911918967962265,
0.25176122784614563,
0.03059295564889908,
0.020486027002334595,
-0.08663035929203033,
0.007126541808247566,
0.0013116227928549051,
0.13572487235069275,
0.05688413232564926,
0.1385612189769745,
0.005616768263280392,
0.06242901831865311,
-0.08957739174365997,
-0.007262091618031263,
-0.025762444362044334,
0.09686149656772614,
-0.0758364349603653,
-0.07162130624055862,
-0.03362584859132767,
0.1531522572040558,
-0.03679627552628517,
-0.21402670443058014,
-0.07010497897863388,
-0.07139389216899872,
-0.06525712460279465,
0.04559910669922829,
0.07442689687013626,
0.08602679520845413,
-0.023312006145715714,
-0.03238995745778084,
0.03778095170855522,
0.039373841136693954,
0.026843475177884102,
0.027468884363770485,
-0.06591904908418655,
0.04882429540157318,
-0.15967637300491333,
0.1163313090801239,
0.012512863613665104,
0.20230773091316223,
0.05243532359600067,
0.026203736662864685,
-0.05482989922165871,
0.13541121780872345,
0.06300681084394455,
-0.04611349105834961,
0.10387180745601654,
0.14933785796165466,
0.02110053412616253,
0.19787801802158356,
0.09336557239294052,
0.022760532796382904,
0.09957192093133926,
-0.11215361207723618,
-0.01664098910987377,
-0.10657697170972824,
0.05909494310617447,
-0.05469442903995514,
0.07295552641153336,
0.17375636100769043,
-0.05262492224574089,
-0.0011392084416002035,
-0.06240479275584221,
-0.036499131470918655,
0.026636777445673943,
0.004519648849964142,
-0.054258882999420166,
-0.16315524280071259,
-0.002113904571160674,
0.0028275386430323124,
0.06550099700689316,
-0.16977131366729736,
-0.04276759549975395,
0.01248144917190075,
-0.07420295476913452,
0.026554416865110397,
0.06390134245157242,
-0.019810998812317848,
0.04446202516555786,
-0.016180118545889854,
-0.03409332409501076,
0.009066873230040073,
0.10556413233280182,
-0.18208561837673187,
-0.09555639326572418
] |
null | null |
transformers
|
# Wav2Vec2-Large-XLSR-53-Kyrgyz
Fine-tuned [facebook/wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) on Kyrgyz using the [Common Voice](https://huggingface.co/datasets/common_voice) dataset.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "ky", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("adilism/wav2vec2-large-xlsr-kyrgyz")
model = Wav2Vec2ForCTC.from_pretrained("adilism/wav2vec2-large-xlsr-kyrgyz")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```
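Note that the snippet above hard-codes the 48kHz sampling rate of Common Voice clips. For audio at other rates, the resampler can be built from the file's actual rate instead (a small sketch; the input path is a placeholder):
```python
import torchaudio

speech_array, sampling_rate = torchaudio.load("some_audio.wav")  # placeholder path
if sampling_rate != 16_000:
    speech_array = torchaudio.transforms.Resample(sampling_rate, 16_000)(speech_array)
speech = speech_array.squeeze().numpy()  # ready to be passed to the processor
```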
## Evaluation
The model can be evaluated as follows on the Kyrgyz test data of Common Voice:
```python
import torch
import torchaudio
from datasets import load_dataset, load_metric
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
import re
test_dataset = load_dataset("common_voice", "ky", split="test")
wer = load_metric("wer")
processor = Wav2Vec2Processor.from_pretrained("adilism/wav2vec2-large-xlsr-kyrgyz")
model = Wav2Vec2ForCTC.from_pretrained("adilism/wav2vec2-large-xlsr-kyrgyz")
model.to("cuda")
chars_to_ignore = [",", "?", ".", "!", "-", ";", ":", "—", "–", "”"]
chars_to_ignore_regex = f'[{"".join(chars_to_ignore)}]'
resampler = torchaudio.transforms.Resample(48_000, 16_000)
# Preprocessing the datasets.
# We need to read the audio files as arrays
def speech_file_to_array_fn(batch):
    batch["sentence"] = re.sub(chars_to_ignore_regex, '', batch["sentence"]).lower()
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch
test_dataset = test_dataset.map(speech_file_to_array_fn)
# Run batched inference on the test set
def evaluate(batch):
    inputs = processor(batch["speech"], sampling_rate=16_000, return_tensors="pt", padding=True)
    with torch.no_grad():
        logits = model(inputs.input_values.to("cuda"), attention_mask=inputs.attention_mask.to("cuda")).logits
    pred_ids = torch.argmax(logits, dim=-1)
    batch["pred_strings"] = processor.batch_decode(pred_ids)
    return batch
result = test_dataset.map(evaluate, batched=True, batch_size=8)
print("WER: {:2f}".format(100 * wer.compute(predictions=result["pred_strings"], references=result["sentence"])))
```
**Test Result**: 34.08 %
## Training
The Common Voice `train` and `validation` datasets were used for training.
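For reference, combining those splits with the `datasets` library might look like this (a sketch only; the preprocessing and training script itself is not part of this card):
```python
from datasets import concatenate_datasets, load_dataset

train = load_dataset("common_voice", "ky", split="train")
validation = load_dataset("common_voice", "ky", split="validation")
train_dataset = concatenate_datasets([train, validation])
print(train_dataset)
```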
|
{"language": "ky", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech", "xlsr-fine-tuning-week"], "datasets": ["common_voice"], "metrics": ["wer"], "model-index": [{"name": {}, "results": [{"task": {"type": "automatic-speech-recognition", "name": "Speech Recognition"}, "dataset": {"name": "Common Voice ky", "type": "common_voice", "args": "ky"}, "metrics": [{"type": "wer", "value": 34.08, "name": "Test WER"}]}]}]}
|
automatic-speech-recognition
|
aismlv/wav2vec2-large-xlsr-kyrgyz
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"xlsr-fine-tuning-week",
"ky",
"dataset:common_voice",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ky"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #ky #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us
|
# Wav2Vec2-Large-XLSR-53-Kyrgyz
Fine-tuned facebook/wav2vec2-large-xlsr-53 on Kyrgyz using the Common Voice dataset.
When using this model, make sure that your speech input is sampled at 16kHz.
## Usage
The model can be used directly (without a language model) as follows:
## Evaluation
The model can be evaluated as follows on the Kyrgyz test data of Common Voice:
Test Result: 34.08 %
## Training
The Common Voice 'train' and 'validation' datasets were used for training.
|
[
"# Wav2Vec2-Large-XLSR-53-Kyrgyz\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on Kyrgyz using the Common Voice dataset.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Kyrgyz test data of Common Voice:\n\n\n\n\nTest Result: 34.08 %",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training."
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #ky #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n",
"# Wav2Vec2-Large-XLSR-53-Kyrgyz\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on Kyrgyz using the Common Voice dataset.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.",
"## Usage\n\nThe model can be used directly (without a language model) as follows:",
"## Evaluation\n\nThe model can be evaluated as follows on the Kyrgyz test data of Common Voice:\n\n\n\n\nTest Result: 34.08 %",
"## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training."
] |
[
76,
68,
20,
29,
23
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #xlsr-fine-tuning-week #ky #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n# Wav2Vec2-Large-XLSR-53-Kyrgyz\n\nFine-tuned facebook/wav2vec2-large-xlsr-53 on Kyrgyz using the Common Voice dataset.\n\nWhen using this model, make sure that your speech input is sampled at 16kHz.## Usage\n\nThe model can be used directly (without a language model) as follows:## Evaluation\n\nThe model can be evaluated as follows on the Kyrgyz test data of Common Voice:\n\n\n\n\nTest Result: 34.08 %## Training\n\nThe Common Voice 'train' and 'validation' datasets were used for training."
] |
[
-0.09662068635225296,
0.01307420153170824,
-0.0019320726860314608,
-0.04755064472556114,
0.0836501270532608,
-0.04053717851638794,
0.16084297001361847,
0.11204231530427933,
0.010964395478367805,
-0.030236296355724335,
0.03068765252828598,
0.03883945569396019,
0.05731818825006485,
0.08347473293542862,
-0.01797707937657833,
-0.2266855388879776,
-0.004771782085299492,
0.0243791863322258,
0.08589039742946625,
0.12269071489572525,
0.08281411230564117,
-0.09162533283233643,
-0.017270494252443314,
0.09628758579492569,
-0.13438105583190918,
0.021237829700112343,
0.012287058867514133,
-0.12434926629066467,
0.11684313416481018,
0.05167385935783386,
0.0643584206700325,
0.07272765040397644,
0.07236133515834808,
-0.21330507099628448,
0.024464135989546776,
0.02291184291243553,
0.04792982339859009,
0.0370427668094635,
0.04192928597331047,
0.0041731251403689384,
0.08950670808553696,
0.03584526479244232,
-0.04708658903837204,
0.034215301275253296,
-0.02643273025751114,
-0.23118865489959717,
-0.026416590437293053,
0.039257582277059555,
0.12641610205173492,
0.11518928408622742,
-0.06612111628055573,
0.11155561357736588,
-0.12908627092838287,
0.08384696394205093,
0.12481620162725449,
-0.1972890943288803,
0.013855229131877422,
0.09285043925046921,
0.047091271728277206,
0.037436343729496,
-0.06910061836242676,
0.04161399230360985,
0.024941924959421158,
0.009838322177529335,
0.021497290581464767,
-0.04995075240731239,
-0.17448842525482178,
-0.01937013678252697,
-0.13702848553657532,
-0.03297329694032669,
0.19523067772388458,
-0.02125544473528862,
-0.060803864151239395,
-0.13776636123657227,
0.00704885134473443,
0.028177358210086823,
-0.023688537999987602,
-0.05690034478902817,
-0.034958966076374054,
-0.005133786238729954,
-0.07634615898132324,
-0.030444607138633728,
-0.12204104661941528,
-0.1326737254858017,
-0.01984802447259426,
0.15417473018169403,
0.050844691693782806,
0.02273578569293022,
-0.11470706760883331,
0.10428724437952042,
-0.07702631503343582,
-0.0643560066819191,
-0.006767570041120052,
0.03051742911338806,
-0.04754871502518654,
0.01702481135725975,
-0.06582137942314148,
-0.12372580915689468,
0.055514857172966,
-0.06493944674730301,
0.022993603721261024,
0.032358117401599884,
0.0295474361628294,
0.04766789823770523,
0.06839297711849213,
0.09137480705976486,
-0.05669208988547325,
-0.019558347761631012,
0.026128971949219704,
0.0039993636310100555,
-0.05531645938754082,
-0.029301410540938377,
-0.06995956599712372,
-0.08132918179035187,
0.05076773464679718,
0.051674984395504,
-0.02046925202012062,
0.02218741737306118,
-0.006159718614071608,
-0.0488588809967041,
0.015548747032880783,
-0.06374168395996094,
-0.04185065999627113,
0.06416866928339005,
-0.013569651171565056,
0.08716153353452682,
0.010675966739654541,
0.08836690336465836,
-0.05927571654319763,
0.02393014170229435,
0.02977168932557106,
0.03782055526971817,
-0.011560716666281223,
-0.08545692265033722,
-0.002968414453789592,
-0.018631651997566223,
0.017290204763412476,
-0.096089206635952,
-0.08377252519130707,
-0.07551642507314682,
0.0066453320905566216,
0.043013229966163635,
-0.029680006206035614,
-0.09489040821790695,
-0.02682781219482422,
-0.0003643875243142247,
-0.06203756108880043,
0.10329245775938034,
-0.03756071254611015,
0.0721614882349968,
-0.019259391352534294,
0.052371710538864136,
0.05746510997414589,
0.08150649815797806,
-0.09741733223199844,
-0.042525939643383026,
0.03739975020289421,
0.12655945122241974,
-0.0403260700404644,
-0.10126274079084396,
-0.10818562656641006,
-0.09822949767112732,
-0.0382721871137619,
0.05272689834237099,
0.06066557392477989,
0.08455586433410645,
-0.1928180605173111,
-0.0877617672085762,
0.13230293989181519,
-0.08330202847719193,
-0.0055268206633627415,
0.18413478136062622,
-0.023353712633252144,
0.13486866652965546,
0.13865751028060913,
0.26097485423088074,
0.17603355646133423,
-0.20274928212165833,
-0.005369429010897875,
0.030911317095160484,
-0.00966127123683691,
-0.029357243329286575,
0.06093965470790863,
-0.02780175767838955,
-0.05291547626256943,
0.04275135323405266,
-0.08979959785938263,
0.05764877796173096,
-0.05163971334695816,
-0.058939505368471146,
-0.007044602185487747,
-0.07767774909734726,
0.1007733941078186,
0.036587703973054886,
0.05796094238758087,
0.0015686084516346455,
-0.03399831801652908,
0.07247510552406311,
0.1411563605070114,
-0.13971541821956635,
0.0440397635102272,
-0.10651962459087372,
0.04792969673871994,
-0.1056647077202797,
-0.004729451611638069,
-0.11019699275493622,
0.17609184980392456,
0.0029804063960909843,
-0.034334447234869,
0.06300566345453262,
0.15764664113521576,
0.00977956224232912,
0.014376894570887089,
-0.05389964580535889,
-0.040102846920490265,
0.01203814335167408,
-0.005726275034248829,
-0.07109599560499191,
-0.0982871800661087,
-0.013043427839875221,
-0.04775816947221756,
0.03216397389769554,
-0.10253395885229111,
-0.01471651066094637,
0.0442805252969265,
-0.0181858092546463,
-0.005675002466887236,
0.020723111927509308,
0.07644826918840408,
0.1030178815126419,
0.020004460588097572,
0.03653930500149727,
0.05434364825487137,
0.005984340328723192,
-0.051861491054296494,
0.15052032470703125,
-0.12245812267065048,
-0.07230769097805023,
0.09356480836868286,
0.0012343719135969877,
0.003794295946136117,
0.01460177730768919,
-0.014344025403261185,
-0.035110291093587875,
-0.06667912751436234,
0.020613307133316994,
0.335855096578598,
-0.009904826991260052,
0.09184926003217697,
-0.08636752516031265,
-0.03826335817575455,
0.03940671309828758,
-0.09269831329584122,
0.050563812255859375,
0.0705123320221901,
-0.014857993461191654,
-0.009614466689527035,
0.014587518759071827,
-0.015894442796707153,
-0.09706802666187286,
0.272455096244812,
-0.02773779258131981,
-0.08321726322174072,
-0.004296785686165094,
-0.054261256009340286,
-0.05281563848257065,
0.07640606164932251,
-0.14488543570041656,
-0.030430711805820465,
0.04681091383099556,
0.04795164614915848,
0.07076743245124817,
-0.1507376879453659,
0.023159516975283623,
-0.0025692509952932596,
-0.1121603474020958,
-0.18735083937644958,
0.09168432652950287,
-0.04102872684597969,
0.030854670330882072,
-0.07557643949985504,
-0.012213329784572124,
0.012127042748034,
-0.04869929701089859,
-0.14430548250675201,
0.1245453953742981,
-0.07044148445129395,
-0.19543799757957458,
-0.11957473307847977,
0.010740361176431179,
0.0033116245176643133,
-0.015526448376476765,
0.09516970068216324,
-0.11050652712583542,
0.014012743718922138,
-0.013664793223142624,
0.11063653230667114,
-0.0017211214872077107,
-0.016378773376345634,
-0.06848705559968948,
0.007277375552803278,
0.004735864698886871,
-0.1320992261171341,
0.0105827571824193,
-0.06990025192499161,
-0.04812333360314369,
0.03320441395044327,
0.016272690147161484,
0.011213396675884724,
0.18360230326652527,
0.02383871003985405,
0.010867203585803509,
-0.03776833415031433,
0.16422955691814423,
-0.0966453030705452,
-0.02139371447265148,
0.16655264794826508,
-0.004720941185951233,
-0.0023004745598882437,
0.09590992331504822,
0.030081231147050858,
-0.02142534777522087,
0.0038236326072365046,
-0.010217907838523388,
-0.08814799040555954,
-0.21731425821781158,
-0.11180412024259567,
-0.042334794998168945,
-0.05702347308397293,
-0.0272318497300148,
-0.023678403347730637,
0.07772437483072281,
0.042501743882894516,
-0.011899643577635288,
-0.07105983793735504,
0.0425356961786747,
-0.010963906534016132,
0.09064152091741562,
0.0036207400262355804,
0.09647635370492935,
-0.04454542696475983,
-0.0392313189804554,
0.007112632505595684,
0.06189495697617531,
0.19878561794757843,
0.022038010880351067,
0.03933834657073021,
0.10972335189580917,
0.14442652463912964,
0.13161136209964752,
0.0625232383608818,
-0.06892884522676468,
-0.012320302426815033,
0.015298073180019855,
-0.057496484369039536,
0.011800752952694893,
0.04316677153110504,
0.1324986070394516,
-0.056650158017873764,
-0.051443737000226974,
-0.05687179043889046,
0.014190595597028732,
0.2163693755865097,
0.08068379014730453,
-0.1461791694164276,
-0.0865364819765091,
-0.02479749172925949,
-0.06407556682825089,
0.011269094422459602,
0.043506331741809845,
0.16342449188232422,
-0.1543533205986023,
0.018505148589611053,
-0.006293418351560831,
0.06930596381425858,
-0.03836631029844284,
0.01631413772702217,
-0.09443144500255585,
0.04126819968223572,
-0.016557903960347176,
0.10813725739717484,
-0.2579129636287689,
0.21656587719917297,
0.007276330608874559,
0.1397092640399933,
-0.07755179703235626,
-0.03680367395281792,
0.01420263759791851,
0.018012534826993942,
0.08694695681333542,
0.023955386132001877,
0.0001823987695388496,
-0.13154487311840057,
-0.06892910599708557,
0.07828855514526367,
0.0070966798812150955,
0.005952490959316492,
0.05042954534292221,
0.010383965447545052,
-0.006487770471721888,
0.007027975749224424,
-0.07283854484558105,
-0.1447262316942215,
-0.04171144217252731,
0.0029898816719651222,
0.16129477322101593,
0.09315969794988632,
-0.01779431290924549,
-0.09507954865694046,
-0.08350802212953568,
-0.005161115899682045,
-0.06342495232820511,
-0.07016226649284363,
-0.01755150966346264,
-0.09268612414598465,
0.12599295377731323,
-0.08799425512552261,
-0.009043406695127487,
0.0938536673784256,
0.08639189600944519,
-0.02771339938044548,
-0.08157868683338165,
0.02198817767202854,
-0.10837867110967636,
-0.1408316195011139,
0.009001223370432854,
0.17643283307552338,
0.12985295057296753,
0.07990047335624695,
0.07623285800218582,
0.009364241734147072,
-0.03156813234090805,
-0.062140513211488724,
0.014159073121845722,
0.08731969445943832,
-0.0792829766869545,
-0.0020568373147398233,
0.03528754413127899,
-0.1645524948835373,
-0.15473511815071106,
-0.07126820087432861,
0.17396163940429688,
0.09101641923189163,
-0.07146628946065903,
0.20044943690299988,
0.2543722689151764,
-0.09031574428081512,
-0.22299134731292725,
-0.04705735296010971,
0.13370545208454132,
0.09608086198568344,
-0.033954471349716187,
-0.19481736421585083,
0.06458304077386856,
-0.022200265899300575,
-0.03422649949789047,
-0.059800442308187485,
-0.31114813685417175,
-0.16312207281589508,
0.1669725477695465,
-0.005869607906788588,
0.1876203417778015,
0.007902137003839016,
-0.02162650227546692,
-0.002703275764361024,
0.04192159324884415,
0.10221545398235321,
-0.1172819435596466,
0.12819074094295502,
0.002752880798652768,
0.12549680471420288,
0.05429988354444504,
-0.026588598266243935,
0.08358139544725418,
0.09413066506385803,
-0.011847472749650478,
0.0076584164053201675,
0.027530863881111145,
0.03608599677681923,
0.008490928448736668,
0.0978519469499588,
-0.08057864010334015,
0.04937742277979851,
-0.08315002173185349,
-0.0844421237707138,
-0.1060672402381897,
0.07457145303487778,
0.03490716218948364,
-0.06977751106023788,
-0.010950357653200626,
-0.03691145405173302,
0.02580667845904827,
-0.007997614331543446,
-0.03783339262008667,
-0.08928808569908142,
0.034523412585258484,
0.13704270124435425,
0.16185177862644196,
-0.005943691823631525,
-0.07187005132436752,
-0.01610616222023964,
-0.053318724036216736,
0.12973488867282867,
-0.0950622707605362,
0.022410796955227852,
0.08000128716230392,
0.07077568024396896,
0.10341889411211014,
0.00623429287225008,
-0.1180606558918953,
0.0939406156539917,
0.022856494411826134,
-0.07183866202831268,
-0.13379526138305664,
-0.04321407154202461,
-0.03764158487319946,
-0.01201658509671688,
0.03489629924297333,
0.09273384511470795,
-0.09009724110364914,
-0.03548130393028259,
-0.03190707042813301,
0.01877945475280285,
-0.11332732439041138,
0.2405387908220291,
0.030485648661851883,
0.07365644723176956,
-0.10274621099233627,
0.007005119696259499,
-0.02137582376599312,
-0.004680237267166376,
0.04657460004091263,
-0.06601545214653015,
-0.07442886382341385,
-0.06654960662126541,
-0.007119918707758188,
0.05567903071641922,
0.05595983564853668,
-0.1315414160490036,
-0.013102089054882526,
-0.0893663838505745,
-0.016759954392910004,
0.0709078386425972,
0.053377553820610046,
0.01605246588587761,
-0.11871466785669327,
-0.055749520659446716,
-0.12428482621908188,
0.08870002627372742,
0.07360075414180756,
-0.02712128683924675,
-0.12356560677289963,
0.18958517909049988,
0.06901712715625763,
0.07004538923501968,
-0.05827908590435982,
-0.0813361331820488,
-0.015419530682265759,
0.07770205289125443,
-0.07793839275836945,
-0.002725023776292801,
-0.07045961171388626,
0.0017200453439727426,
-0.008901618421077728,
-0.0794302225112915,
-0.004234759137034416,
0.0828932449221611,
-0.10309518128633499,
0.08426903188228607,
0.003143944079056382,
0.08203653246164322,
-0.050804365426301956,
0.01014875527471304,
0.0337352491915226,
-0.05031678080558777,
0.08166874945163727,
0.13967430591583252,
-0.0893021672964096,
0.14671702682971954,
-0.207621768116951,
-0.027763640508055687,
0.06415428966283798,
0.07006541639566422,
-0.049725502729415894,
-0.14237746596336365,
0.01819838397204876,
0.0693674236536026,
0.07444524019956589,
-0.00048753712326288223,
0.1091001108288765,
-0.06555230170488358,
-0.013441982679069042,
-0.06959684193134308,
0.010032491758465767,
-0.04863641783595085,
0.04622531309723854,
0.053225189447402954,
0.1654183715581894,
0.17270982265472412,
-0.10289319604635239,
0.10421343892812729,
-0.08931834995746613,
0.010840054601430893,
-0.05591603368520737,
-0.050890613347291946,
-0.11549581587314606,
-0.08433973044157028,
0.05747217684984207,
-0.044536031782627106,
0.09975910931825638,
0.008434961549937725,
0.005598095711320639,
-0.06588663160800934,
-0.0677192434668541,
-0.004995815921574831,
-0.01739894598722458,
0.2824482023715973,
0.03700323775410652,
0.017445841804146767,
-0.047829512506723404,
0.02664761058986187,
-0.012447899207472801,
0.09260299801826477,
-0.023509545251727104,
0.12599004805088043,
0.0344855934381485,
0.03859279304742813,
0.09812850505113602,
-0.05069522187113762,
-0.0645795539021492,
-0.006634105928242207,
-0.14718948304653168,
0.046382494270801544,
-0.07593604922294617,
0.21356186270713806,
0.11288329213857651,
-0.11041593551635742,
0.08568038791418076,
0.02356533892452717,
-0.09059561043977737,
-0.1512451022863388,
-0.16058894991874695,
-0.07179930061101913,
-0.14187167584896088,
0.027160847559571266,
-0.08604219555854797,
0.02705552615225315,
0.0489896684885025,
0.032473284751176834,
-0.022429732605814934,
0.15372101962566376,
0.009364009834825993,
-0.10516559332609177,
0.1015850082039833,
-0.07859104126691818,
-0.05917701497673988,
-0.08540397137403488,
0.047762542963027954,
0.1636720597743988,
-0.018242960795760155,
0.061689410358667374,
0.002720247022807598,
-0.10368780046701431,
0.020480358973145485,
-0.07115418463945389,
-0.07399719208478928,
-0.02043874002993107,
-0.0075620426796376705,
0.13938000798225403,
0.09987711906433105,
0.12469163537025452,
-0.07746198773384094,
-0.0037872560787945986,
0.15311892330646515,
-0.033343877643346786,
-0.1552727073431015,
-0.11940037459135056,
0.1700814664363861,
0.0326913520693779,
0.006272425409406424,
0.035988323390483856,
-0.03237877041101456,
-0.015885060653090477,
0.21607686579227448,
0.2154039740562439,
0.09067969769239426,
0.035204630345106125,
-0.04225926473736763,
-0.004687931854277849,
-0.0336918942630291,
0.07943210005760193,
0.07591427117586136,
0.23784424364566803,
-0.03407827019691467,
0.043779391795396805,
-0.08638706058263779,
-0.05681392922997475,
0.011784113012254238,
0.05882081016898155,
-0.055322956293821335,
-0.1413518488407135,
-0.0013306965120136738,
0.12624825537204742,
-0.10677449405193329,
-0.11123678833246231,
-0.11786085367202759,
-0.08821607381105423,
-0.06976944953203201,
0.00400662561878562,
0.03418911248445511,
0.09898296743631363,
0.024988988414406776,
-0.08132443577051163,
0.05388384684920311,
0.08280156552791595,
0.01243529375642538,
-0.06349131464958191,
-0.02297833561897278,
0.06639696657657623,
-0.05721176788210869,
-0.0022754536475986242,
-0.0089583033695817,
0.21697108447551727,
0.016560643911361694,
0.08085151016712189,
-0.026569688692688942,
0.1532849818468094,
-0.012577235698699951,
-0.09361818432807922,
0.026364410296082497,
0.1546984612941742,
-0.010308816097676754,
0.07982439547777176,
0.026509223505854607,
-0.12165111303329468,
0.059444598853588104,
-0.143999382853508,
-0.0019245704170316458,
-0.08697926998138428,
0.05891122668981552,
-0.0355226993560791,
0.08576378226280212,
0.05715445056557655,
-0.06860349327325821,
-0.040046583861112595,
-0.052459780126810074,
0.05079393833875656,
0.007058156654238701,
-0.060590941458940506,
-0.042343541979789734,
-0.21409174799919128,
-0.007337014190852642,
-0.10371863096952438,
-0.0027965621557086706,
-0.1879161149263382,
-0.0024523476604372263,
-0.017680484801530838,
-0.07503527402877808,
0.006500301882624626,
0.02953789196908474,
0.08769029378890991,
-0.002763445023447275,
0.00648830970749259,
-0.038775358349084854,
0.01564815454185009,
0.09683755785226822,
-0.18360938131809235,
-0.10504250973463058
] |
null | null |
transformers
|
{'junk': 0, 'relevant': 1}
|
{}
|
text-classification
|
adit94/relevancy_classifier
|
[
"transformers",
"pytorch",
"distilbert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
{'junk': 0, 'relevant': 1}
|
[] |
[
"TAGS\n#transformers #pytorch #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
38
] |
[
"passage: TAGS\n#transformers #pytorch #distilbert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.03533577919006348,
0.06443645805120468,
-0.007824759930372238,
0.02963758073747158,
0.21134145557880402,
0.0368538573384285,
0.06359195709228516,
0.10786357522010803,
0.047846585512161255,
-0.029699334874749184,
0.09624463319778442,
0.2456933856010437,
-0.04527274891734123,
0.11506538093090057,
-0.1315092295408249,
-0.2995516061782837,
0.0646483302116394,
0.06820031255483627,
0.01968790777027607,
0.11027561128139496,
0.08955937623977661,
-0.08577843010425568,
0.06416945904493332,
-0.03987749293446541,
-0.13028311729431152,
0.036934368312358856,
0.037670549005270004,
-0.12557227909564972,
0.08850666880607605,
0.03936105594038963,
0.16363440454006195,
0.029493317008018494,
-0.0571451373398304,
-0.13760130107402802,
0.03542056307196617,
0.003107793163508177,
-0.08173839002847672,
0.035451244562864304,
0.07971785217523575,
-0.13606007397174835,
0.03269175812602043,
0.01657985709607601,
0.028779901564121246,
0.05034712329506874,
-0.13549968600273132,
-0.06766978651285172,
-0.009825913235545158,
0.02846479043364525,
0.08123840391635895,
0.06563035398721695,
-0.00027321543893776834,
0.11571130156517029,
-0.14468228816986084,
0.13729768991470337,
0.08681581169366837,
-0.26667332649230957,
-0.01513616368174553,
0.09300960600376129,
0.014211298897862434,
0.03189397603273392,
-0.05005642771720886,
0.03387840837240219,
0.021587392315268517,
0.012041964568197727,
-0.005505601409822702,
-0.06911619752645493,
-0.12172640115022659,
0.01909228041768074,
-0.0760328620672226,
-0.039914727210998535,
0.2024218738079071,
-0.06752687692642212,
0.06574457883834839,
-0.03853347897529602,
-0.09920144081115723,
-0.04725521057844162,
-0.028420861810445786,
0.03284634277224541,
-0.05052020400762558,
0.06803859770298004,
0.04873250797390938,
0.02093963511288166,
-0.10541380196809769,
0.027895580977201462,
-0.2198127955198288,
0.21804359555244446,
0.00917235016822815,
0.04113364964723587,
-0.17035873234272003,
0.06059039384126663,
0.043774571269750595,
-0.10760118812322617,
0.049048252403736115,
-0.10497406870126724,
0.019541887566447258,
-0.04680290073156357,
-0.07833123207092285,
-0.044003088027238846,
0.0761561468243599,
0.15131190419197083,
0.024525625631213188,
0.0676354393362999,
-0.023907558992505074,
0.08125972747802734,
0.03615585342049599,
0.12704050540924072,
0.04965166375041008,
-0.030767392367124557,
0.03752761334180832,
-0.13245059549808502,
-0.00002132852932845708,
-0.07070981711149216,
-0.1520344465970993,
-0.028104213997721672,
0.058518148958683014,
0.07771685719490051,
0.007545619271695614,
0.09117837250232697,
-0.07305282354354858,
-0.03670652583241463,
0.09205243736505508,
-0.09038619697093964,
0.022389709949493408,
0.0189626757055521,
0.024910688400268555,
0.11437109857797623,
-0.01640472002327442,
-0.004441923461854458,
-0.08554866164922714,
0.15481221675872803,
-0.05412428826093674,
0.01906411163508892,
-0.027951309457421303,
-0.07562480866909027,
0.023844171315431595,
-0.16517141461372375,
0.024268588051199913,
-0.16968505084514618,
-0.12177367508411407,
0.0011497566010802984,
0.01497613824903965,
0.0003558929602149874,
-0.029599502682685852,
-0.034584347158670425,
0.0028823118191212416,
0.05339471623301506,
-0.05009040981531143,
-0.08925710618495941,
-0.0734119787812233,
0.09545788168907166,
-0.03665677830576897,
0.07958490401506424,
-0.12844105064868927,
0.0784672200679779,
-0.0987219363451004,
-0.0187049712985754,
-0.14024826884269714,
0.05743253231048584,
-0.04765705391764641,
0.18340644240379333,
0.01636499911546707,
-0.05442013591527939,
-0.05629796162247658,
0.05081459879875183,
-0.06792773306369781,
0.17081454396247864,
-0.10482346266508102,
-0.11688733100891113,
0.18975088000297546,
-0.09539731591939926,
-0.11199936270713806,
0.08214274048805237,
-0.012322766706347466,
-0.002544441493228078,
0.10592521727085114,
0.18774141371250153,
0.11772145330905914,
0.015394842252135277,
0.071439228951931,
0.1266816407442093,
-0.09738999605178833,
-0.10514426231384277,
-0.016195401549339294,
-0.010998358018696308,
-0.11682542413473129,
0.06311710923910141,
0.08283041417598724,
0.0693083181977272,
-0.04381299018859863,
-0.038738906383514404,
-0.015374792739748955,
-0.0029897931963205338,
0.14953550696372986,
0.06494788080453873,
0.11409911513328552,
-0.07472079247236252,
0.010434641502797604,
0.010832404717803001,
-0.008651630952954292,
0.016917014494538307,
0.02875317819416523,
-0.061046965420246124,
0.11194391548633575,
0.03876045346260071,
0.02736404910683632,
-0.24566538631916046,
-0.06682449579238892,
-0.011323003098368645,
0.1456235647201538,
-0.02446315996348858,
0.10121438652276993,
0.045561324805021286,
-0.0504569448530674,
-0.010978372767567635,
-0.029581138864159584,
0.17828664183616638,
0.022655870765447617,
-0.06422974169254303,
-0.0612877793610096,
0.0651540756225586,
-0.07150227576494217,
0.012235969305038452,
-0.07036937773227692,
0.020627280697226524,
0.08606486022472382,
0.12204300612211227,
0.010734139941632748,
0.06475073099136353,
-0.02579765021800995,
0.07209211587905884,
-0.07104320824146271,
0.019227510318160057,
0.11117701232433319,
-0.010595849715173244,
-0.07011682540178299,
0.13524381816387177,
-0.1373681277036667,
0.2673107087612152,
0.19483336806297302,
-0.2967563271522522,
0.0005786092369817197,
-0.04439404606819153,
-0.007282515522092581,
0.030610160902142525,
0.030042126774787903,
0.014859852381050587,
0.08437592536211014,
0.0014727829257026315,
0.20341786742210388,
-0.021047484129667282,
-0.03919289633631706,
-0.018922755494713783,
-0.04877391830086708,
-0.03148360177874565,
0.08788784593343735,
0.06451795995235443,
-0.192406564950943,
0.19050060212612152,
0.21731194853782654,
0.010114802047610283,
0.16024211049079895,
-0.010486523620784283,
0.043989237397909164,
0.09252246469259262,
-0.03757351264357567,
-0.024272754788398743,
-0.08932791650295258,
-0.1848243772983551,
-0.03918878361582756,
0.07472185045480728,
0.03010893426835537,
0.06895712018013,
-0.10219920426607132,
-0.027038687840104103,
0.0004840063920710236,
0.021132981404662132,
-0.01947878859937191,
0.08704918622970581,
0.08203180879354477,
0.1052171140909195,
-0.017219819128513336,
-0.07267280668020248,
0.11330383270978928,
-0.0011106154415756464,
-0.07149384170770645,
0.18412140011787415,
-0.15954560041427612,
-0.36233094334602356,
-0.1530739665031433,
-0.20456592738628387,
-0.02883506752550602,
0.06615062057971954,
0.10685895383358002,
-0.12165717035531998,
-0.048558108508586884,
0.0375000461935997,
-0.013693227432668209,
-0.04041895270347595,
0.03981194645166397,
-0.05303730443120003,
0.07329315692186356,
-0.05222955346107483,
-0.06364883482456207,
-0.06660815328359604,
-0.03131863474845886,
-0.004695216193795204,
0.16393853724002838,
-0.12483653426170349,
0.06658802926540375,
0.1819998174905777,
0.0010995424818247557,
0.06644674390554428,
-0.032483141869306564,
0.1697184294462204,
-0.08651559799909592,
-0.02343188226222992,
0.1893177032470703,
-0.07345744967460632,
0.07808925211429596,
0.15666639804840088,
0.020104380324482918,
-0.0712679922580719,
0.0352557972073555,
-0.035343270748853683,
-0.08934015780687332,
-0.2058166265487671,
-0.1703205555677414,
-0.12546730041503906,
0.05237005278468132,
0.0663270428776741,
0.07582127302885056,
0.12632738053798676,
0.06528977304697037,
0.00627241050824523,
0.010700550861656666,
0.006936580874025822,
0.07483439892530441,
0.24698598682880402,
-0.0010819705203175545,
0.14767786860466003,
-0.057353224605321884,
-0.13245494663715363,
0.08233633637428284,
0.000922833161894232,
0.1185675784945488,
0.08539658784866333,
0.017674902454018593,
0.005295653361827135,
0.05462205410003662,
0.164198637008667,
0.1299368292093277,
0.04298880323767662,
-0.013622048310935497,
-0.01172587089240551,
0.0032578855752944946,
-0.0797785148024559,
0.006457295268774033,
0.07906489074230194,
-0.14195358753204346,
-0.08270972222089767,
-0.11039547622203827,
0.10006770491600037,
0.08380265533924103,
0.042938295751810074,
-0.2052999883890152,
0.005745685659348965,
0.09206069260835648,
-0.027502331882715225,
-0.09957162290811539,
0.06463603675365448,
-0.04812092334032059,
-0.13455109298229218,
0.10769277811050415,
-0.029609164223074913,
0.13354617357254028,
-0.0870715081691742,
0.08272852748632431,
-0.0378170944750309,
-0.11202792823314667,
0.03467349335551262,
0.10786684602499008,
-0.27751585841178894,
0.2031957507133484,
0.007435075007379055,
-0.06144534796476364,
-0.07824365049600601,
-0.015199865214526653,
0.039944443851709366,
0.22591036558151245,
0.06934285908937454,
0.004277070518583059,
-0.05739999935030937,
-0.1865520477294922,
-0.009981787763535976,
-0.008337096311151981,
0.12231403589248657,
-0.03427664935588837,
-0.01814279891550541,
-0.036011241376399994,
-0.030255382880568504,
-0.03578435257077217,
-0.06897740066051483,
0.02666986919939518,
-0.17997102439403534,
0.056329283863306046,
0.034454237669706345,
0.05416429787874222,
0.01469043642282486,
-0.04343695193529129,
-0.11887014657258987,
0.19838201999664307,
-0.10767136514186859,
-0.09184177964925766,
-0.11828504502773285,
-0.07852382957935333,
0.02535579912364483,
-0.08476060628890991,
0.06807194650173187,
-0.08172672241926193,
0.018900277093052864,
-0.06600436568260193,
-0.20524995028972626,
0.11596046388149261,
-0.10182060301303864,
-0.03258875012397766,
-0.058350928127765656,
0.1526644378900528,
-0.07479622215032578,
0.010474151000380516,
0.03318091109395027,
0.02239469438791275,
-0.08559903502464294,
-0.08446884155273438,
-0.018381169065833092,
0.03129338473081589,
0.06142119690775871,
0.08739607781171799,
-0.09792511910200119,
-0.07674866914749146,
-0.03134889155626297,
0.02817792072892189,
0.2929084002971649,
0.1401015967130661,
-0.06586769968271255,
0.1629326492547989,
0.10387758165597916,
-0.06942285597324371,
-0.3373493552207947,
-0.09150945395231247,
-0.09645266830921173,
-0.03972399979829788,
-0.042589932680130005,
-0.16358928382396698,
0.13413257896900177,
-0.004249863792210817,
-0.010055972263216972,
0.08473600447177887,
-0.16361457109451294,
-0.08480892330408096,
0.19654500484466553,
-0.0355062410235405,
0.36373743414878845,
-0.09189414978027344,
-0.09806639701128006,
-0.07035496085882187,
-0.1232207641005516,
0.12262474000453949,
0.007738110609352589,
0.08150525391101837,
-0.02050303854048252,
0.04451111704111099,
0.04815887659788132,
-0.03690929710865021,
0.10097026824951172,
0.036669690161943436,
0.025901002809405327,
-0.11938466131687164,
-0.09219347685575485,
0.023168733343482018,
-0.019243339076638222,
-0.007111898623406887,
-0.01547485776245594,
0.01685570739209652,
-0.17164339125156403,
-0.04131095111370087,
-0.07032524049282074,
0.05912882834672928,
0.04161927476525307,
-0.029813537374138832,
0.012351144105196,
-0.020498499274253845,
-0.000361355283530429,
0.006620287895202637,
0.251852810382843,
-0.03737054020166397,
0.1604781597852707,
0.08527542650699615,
0.141584113240242,
-0.15723979473114014,
0.01194052491337061,
-0.07652142643928528,
-0.05061504244804382,
0.06191904842853546,
-0.06635212153196335,
0.07575498521327972,
0.13591395318508148,
-0.05730273202061653,
0.07247055321931839,
0.11612356454133987,
0.07706465572118759,
-0.034392137080430984,
0.16330119967460632,
-0.2292891889810562,
0.04589579999446869,
-0.050483379513025284,
-0.033954232931137085,
0.06465915590524673,
0.0655360221862793,
0.1258573830127716,
0.06694923341274261,
-0.04017629101872444,
0.005630772560834885,
0.00028037314768880606,
0.005372054409235716,
0.07443340867757797,
0.04748379439115524,
0.04316747188568115,
-0.14709694683551788,
0.05031560733914375,
0.05119774490594864,
-0.15819577872753143,
-0.022534551098942757,
0.1376158893108368,
-0.1704932600259781,
-0.1271103173494339,
-0.021827740594744682,
0.12368015199899673,
-0.09311434626579285,
-0.046253565698862076,
-0.07048245519399643,
-0.13402129709720612,
0.07112511247396469,
0.18836617469787598,
0.12805050611495972,
0.09663103520870209,
-0.06118634715676308,
-0.04969988390803337,
0.0036050756461918354,
-0.004089095629751682,
0.017009761184453964,
0.03120747022330761,
-0.12284451723098755,
0.046005018055438995,
-0.02090919390320778,
0.15390309691429138,
-0.09199176728725433,
-0.07624588906764984,
-0.1582917422056198,
0.04238278418779373,
-0.09195777773857117,
-0.023019742220640182,
-0.09330286085605621,
-0.01648246869444847,
0.0030273916199803352,
-0.030272169038653374,
-0.026145517826080322,
-0.06213071197271347,
-0.11623096466064453,
0.04011767357587814,
-0.028817979618906975,
0.04146858677268028,
-0.06920336186885834,
-0.04603973776102066,
0.09102679789066315,
-0.03833403438329697,
0.10358903557062149,
0.10654495656490326,
-0.0914529487490654,
0.0934479758143425,
-0.14121071994304657,
-0.1319282501935959,
0.1433861404657364,
0.030263781547546387,
0.07207431644201279,
0.07694290578365326,
0.03595962002873421,
0.07349478453397751,
0.004535248037427664,
0.06631990522146225,
0.06761990487575531,
-0.12337382882833481,
0.061452679336071014,
-0.046973392367362976,
-0.17189696431159973,
-0.05778007209300995,
-0.04047338292002678,
0.10660306364297867,
0.010234192945063114,
0.1720496565103531,
-0.05692226439714432,
0.1017514169216156,
-0.03180769085884094,
0.0038062711246311665,
-0.01604292169213295,
-0.20698778331279755,
-0.06364472210407257,
-0.08077114075422287,
0.026275143027305603,
0.005083381198346615,
0.23303534090518951,
0.061751753091812134,
0.033835094422101974,
0.04869496077299118,
0.09752455353736877,
-0.0014774927403777838,
0.023545393720269203,
0.17794077098369598,
0.10133370757102966,
-0.05567975342273712,
-0.05575546622276306,
0.05616139620542526,
0.029215605929493904,
0.006353367585688829,
0.14132826030254364,
0.07252193242311478,
-0.041009921580553055,
0.07551323622465134,
-0.03376345708966255,
0.04427867755293846,
-0.1321653574705124,
-0.16054923832416534,
-0.05143791809678078,
0.07023841142654419,
0.01740087941288948,
0.03448288515210152,
0.07088012248277664,
-0.028410857543349266,
0.05220868065953255,
-0.033101536333560944,
-0.05869230628013611,
-0.18244294822216034,
-0.09428907185792923,
-0.09423913061618805,
-0.09753676503896713,
0.0058974651619791985,
-0.07943454384803772,
-0.01026046834886074,
0.06547573953866959,
0.037508975714445114,
-0.05198773369193077,
0.07752657681703568,
0.003285798244178295,
-0.05593571066856384,
0.08687124401330948,
-0.045962750911712646,
0.02649652026593685,
0.00841206219047308,
-0.029707664623856544,
-0.1380927860736847,
-0.013390704058110714,
-0.04401649907231331,
0.040850814431905746,
-0.058590736240148544,
0.007230483461171389,
-0.1483704298734665,
-0.12039808928966522,
-0.019934508949518204,
0.0580129399895668,
-0.06074916571378708,
0.14175079762935638,
0.015395265072584152,
0.00611070916056633,
0.047287240624427795,
0.17810532450675964,
-0.0544942207634449,
-0.06539076566696167,
-0.04489162564277649,
0.24079638719558716,
0.09303659200668335,
0.10803006589412689,
0.0026883413083851337,
-0.013426939956843853,
-0.07931891828775406,
0.28847232460975647,
0.27526742219924927,
-0.04996299743652344,
0.054827310144901276,
0.007495596073567867,
0.03283945098519325,
0.15242771804332733,
0.1401364952325821,
0.09061526507139206,
0.24117816984653473,
-0.0521743968129158,
-0.05017128586769104,
-0.026741530746221542,
-0.03419290855526924,
-0.13402216136455536,
0.0581725612282753,
0.05382576957345009,
-0.0488528348505497,
-0.06285785138607025,
0.10921014845371246,
-0.21934591233730316,
0.16537490487098694,
0.019078493118286133,
-0.20565392076969147,
-0.06819386035203934,
-0.03284084051847458,
0.1365688294172287,
-0.0016830840613692999,
0.07499389350414276,
-0.00323955318890512,
-0.11883772164583206,
0.042848069220781326,
0.01306091621518135,
-0.20812170207500458,
-0.0041817850433290005,
0.06021128222346306,
-0.05781300365924835,
-0.0120098190382123,
-0.02640264853835106,
0.03757385164499283,
0.06560133397579193,
0.07958315312862396,
-0.0117155397310853,
0.04959989711642265,
-0.012948726303875446,
-0.030828366056084633,
0.029231732711195946,
0.02946082502603531,
0.0038178605027496815,
-0.09871038049459457,
0.06783884763717651,
-0.16667571663856506,
0.0549757145345211,
-0.05383889377117157,
-0.05352160334587097,
-0.019258368760347366,
0.04339629411697388,
-0.05456918105483055,
0.04438189044594765,
0.10450860112905502,
0.011940731666982174,
-0.025312455371022224,
-0.04523419588804245,
-0.04262804985046387,
-0.012295196764171124,
-0.1369558572769165,
-0.14967197179794312,
-0.09997987747192383,
-0.08965370059013367,
0.09313849359750748,
0.0034958450123667717,
-0.12975360453128815,
-0.006513827480375767,
-0.11122267693281174,
0.05365913361310959,
-0.16868756711483002,
0.09322161972522736,
0.0323028489947319,
0.015595607459545135,
-0.011563225649297237,
-0.040581803768873215,
0.04532773047685623,
0.07905946671962738,
-0.1267605572938919,
-0.08728102594614029
] |
null | null |
transformers
|
Label association
{'Biology': 0, 'Physics': 1, 'Chemistry': 2, 'Maths': 3}
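A minimal inference sketch using this label association (assuming the standard Hugging Face Transformers sequence-classification API for this RoBERTa checkpoint; the input sentence is a made-up example):
```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Label mapping taken from the association above; the input text is a hypothetical example
label2id = {'Biology': 0, 'Physics': 1, 'Chemistry': 2, 'Maths': 3}
id2label = {v: k for k, v in label2id.items()}

tokenizer = AutoTokenizer.from_pretrained("Jackett/subject_classifier")
model = AutoModelForSequenceClassification.from_pretrained("Jackett/subject_classifier")

text = "Mitochondria are the powerhouse of the cell."
inputs = tokenizer(text, return_tensors="pt", truncation=True)
with torch.no_grad():
    logits = model(**inputs).logits
predicted_id = logits.argmax(dim=-1).item()
print(id2label[predicted_id])  # expected to print a subject name, e.g. 'Biology'
```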
|
{}
|
text-classification
|
Jackett/subject_classifier
|
[
"transformers",
"pytorch",
"roberta",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #roberta #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
Label association
{'Biology': 0, 'Physics': 1, 'Chemistry': 2, 'Maths': 3}
|
[] |
[
"TAGS\n#transformers #pytorch #roberta #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
37
] |
[
"passage: TAGS\n#transformers #pytorch #roberta #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.020008964464068413,
0.044132623821496964,
-0.0078112599439918995,
0.02638433128595352,
0.20928440988063812,
0.035342395305633545,
0.06615248322486877,
0.11328862607479095,
0.016332678496837616,
-0.028178846463561058,
0.1062050312757492,
0.244922935962677,
-0.040547244250774384,
0.11052706092596054,
-0.11703961342573166,
-0.3019167184829712,
0.0651109591126442,
0.058234453201293945,
0.0004814834101125598,
0.11527911573648453,
0.09698275476694107,
-0.07739077508449554,
0.06869864463806152,
-0.03217874467372894,
-0.131941020488739,
0.042836710810661316,
0.04529296234250069,
-0.13870608806610107,
0.09858331084251404,
0.04921972379088402,
0.15171344578266144,
0.01987096667289734,
-0.06366993486881256,
-0.1485668271780014,
0.03940661624073982,
0.0016886860830709338,
-0.08661011606454849,
0.03708817437291145,
0.071238212287426,
-0.14339829981327057,
0.04450083151459694,
0.03611757233738899,
0.02662758156657219,
0.05500191077589989,
-0.1467411369085312,
-0.06837250292301178,
-0.015736868605017662,
0.030547795817255974,
0.06831184029579163,
0.06859925389289856,
-0.003380601992830634,
0.14116983115673065,
-0.1399628221988678,
0.12732058763504028,
0.10533753782510757,
-0.2779976427555084,
-0.01969316229224205,
0.09297934174537659,
0.03233207389712334,
0.04336971044540405,
-0.0469091534614563,
0.05149709805846214,
0.02325548231601715,
0.005723325069993734,
-0.013593494892120361,
-0.07786429673433304,
-0.13017228245735168,
0.029447631910443306,
-0.07575114071369171,
-0.047911252826452255,
0.18931140005588531,
-0.06078016012907028,
0.06419374793767929,
-0.02578158490359783,
-0.09002748131752014,
-0.04515666514635086,
-0.02595321089029312,
0.02780180424451828,
-0.04802282154560089,
0.06563033908605576,
0.02161816507577896,
0.010254548862576485,
-0.10361794382333755,
0.025263015180826187,
-0.21374544501304626,
0.22683702409267426,
0.014008278027176857,
0.04628974571824074,
-0.17974287271499634,
0.04919026419520378,
0.016432149335741997,
-0.09800734370946884,
0.04059531167149544,
-0.10359881818294525,
0.019286664202809334,
-0.036020707339048386,
-0.07521474361419678,
-0.04318004101514816,
0.08935294300317764,
0.17481516301631927,
0.060050930827856064,
0.05660588666796684,
-0.02891738899052143,
0.08364686369895935,
0.034934286028146744,
0.12381786108016968,
0.029145758599042892,
-0.0430731363594532,
0.054288964718580246,
-0.13852715492248535,
0.005985075607895851,
-0.06892093271017075,
-0.15193496644496918,
-0.043890099972486496,
0.05996817350387573,
0.08541124314069748,
0.016340363770723343,
0.08751767128705978,
-0.05123571678996086,
-0.025786807760596275,
0.057079561054706573,
-0.07623042166233063,
0.010047397576272488,
0.009102854877710342,
0.021692194044589996,
0.10377813875675201,
-0.016680341213941574,
0.0019239634275436401,
-0.07883065193891525,
0.13569426536560059,
-0.05062291771173477,
0.002834826707839966,
-0.028882183134555817,
-0.06732304394245148,
0.031353920698165894,
-0.1529940962791443,
0.04031229764223099,
-0.18075710535049438,
-0.10555167496204376,
-0.0007299539283849299,
0.01633904129266739,
-0.010452449321746826,
-0.036828503012657166,
-0.029121527448296547,
-0.0010542483069002628,
0.0465390607714653,
-0.05203495919704437,
-0.05372150242328644,
-0.0710693895816803,
0.09455125033855438,
-0.025662148371338844,
0.07735227048397064,
-0.11272741109132767,
0.07412353903055191,
-0.09475456178188324,
-0.029008569195866585,
-0.14016872644424438,
0.04407212883234024,
-0.04205916076898575,
0.18818572163581848,
0.013361545279622078,
-0.03987136483192444,
-0.05454714596271515,
0.06222211569547653,
-0.06554840505123138,
0.16502423584461212,
-0.055415909737348557,
-0.1103559210896492,
0.21572303771972656,
-0.08793190866708755,
-0.12616907060146332,
0.07853521406650543,
-0.01983499526977539,
0.014107154682278633,
0.09935758262872696,
0.18871597945690155,
0.10630406439304352,
0.016910504549741745,
0.0883619412779808,
0.09970127791166306,
-0.10351017117500305,
-0.1213667243719101,
-0.003216387936845422,
-0.008271720260381699,
-0.12229956686496735,
0.057527974247932434,
0.08477486670017242,
0.06781025230884552,
-0.04844829812645912,
-0.03921041637659073,
-0.016000164672732353,
-0.011660793796181679,
0.11266223341226578,
0.0542222298681736,
0.11704793572425842,
-0.08393700420856476,
-0.0022737428080290556,
-0.022228462621569633,
-0.005893548019230366,
0.02648397535085678,
0.028688877820968628,
-0.05932282656431198,
0.11452367901802063,
0.02017734758555889,
0.029400894418358803,
-0.2367136925458908,
-0.06601598858833313,
-0.020603468641638756,
0.1433582305908203,
-0.010105208493769169,
0.10095411539077759,
0.04427468404173851,
-0.06928575038909912,
-0.01621636003255844,
-0.009299000725150108,
0.17815911769866943,
0.01738034375011921,
-0.04921546205878258,
-0.07032492011785507,
0.07650125026702881,
-0.07516951113939285,
0.015835674479603767,
-0.07112815976142883,
0.016271501779556274,
0.07571322470903397,
0.11224865168333054,
0.008744978345930576,
0.0727425292134285,
-0.02016051858663559,
0.06210291013121605,
-0.07318591326475143,
0.022908657789230347,
0.10824329406023026,
-0.004980417434126139,
-0.06107798591256142,
0.1551254838705063,
-0.14208556711673737,
0.27617180347442627,
0.19884657859802246,
-0.2908918559551239,
-0.02017652988433838,
-0.025536252185702324,
-0.004497346933931112,
0.02869114652276039,
0.03061705082654953,
0.020218806341290474,
0.08507369458675385,
-0.004770483355969191,
0.19971705973148346,
-0.023923929780721664,
-0.03995136916637421,
-0.006853953469544649,
-0.05899958312511444,
-0.02046085335314274,
0.09736490249633789,
0.06067580729722977,
-0.20302586257457733,
0.1880679875612259,
0.18911747634410858,
0.004468548111617565,
0.15545931458473206,
0.0003551404515746981,
0.04150131344795227,
0.0840771347284317,
-0.04905596002936363,
-0.014827928505837917,
-0.08001329749822617,
-0.17313796281814575,
-0.0473613403737545,
0.07346320897340775,
0.02623208612203598,
0.06899876892566681,
-0.10228913277387619,
-0.039061445742845535,
0.00035962145193479955,
0.028382880613207817,
-0.013808752410113811,
0.09584128856658936,
0.07520584017038345,
0.1137809306383133,
0.0046613202430307865,
-0.0636981874704361,
0.10756023973226547,
0.001925493124872446,
-0.07367260009050369,
0.1902109980583191,
-0.1323123574256897,
-0.337295800447464,
-0.15708862245082855,
-0.2121734917163849,
-0.019831912592053413,
0.06118001043796539,
0.10692126303911209,
-0.11363879591226578,
-0.04476870596408844,
0.04283493384718895,
-0.0034252714831382036,
-0.06604110449552536,
0.03876585140824318,
-0.06533156335353851,
0.08773002028465271,
-0.06364738941192627,
-0.05990470200777054,
-0.06569340825080872,
-0.038312867283821106,
-0.013243465684354305,
0.15560667216777802,
-0.12804582715034485,
0.08351859450340271,
0.1680022031068802,
-0.0029614458326250315,
0.06352407485246658,
-0.035965193063020706,
0.15339027345180511,
-0.10014189779758453,
-0.030315415933728218,
0.1889757513999939,
-0.068211130797863,
0.0744536817073822,
0.16308091580867767,
0.014739030972123146,
-0.0636824518442154,
0.030278878286480904,
-0.034673016518354416,
-0.10021910816431046,
-0.22630487382411957,
-0.14426615834236145,
-0.12350988388061523,
0.04796411097049713,
0.05659063905477524,
0.07286151498556137,
0.14150547981262207,
0.07591188699007034,
0.016282901167869568,
0.00013132939056959003,
0.0014127136673778296,
0.09078255295753479,
0.2534273564815521,
0.009143847972154617,
0.14892973005771637,
-0.07104091346263885,
-0.13614638149738312,
0.08464624732732773,
0.01240177545696497,
0.12425845116376877,
0.09204304963350296,
0.004151592496782541,
0.010810011066496372,
0.055354043841362,
0.16762010753154755,
0.10863393545150757,
0.043654248118400574,
-0.016212422400712967,
-0.020722083747386932,
0.008428140543401241,
-0.06790567189455032,
0.01627505198121071,
0.05736133083701134,
-0.15321840345859528,
-0.07510635256767273,
-0.13223819434642792,
0.09712949395179749,
0.09360931813716888,
0.0470423586666584,
-0.19960257411003113,
0.005815644282847643,
0.0936809852719307,
-0.0333959199488163,
-0.09875739365816116,
0.07462789863348007,
-0.07721393555402756,
-0.15226376056671143,
0.10575620085000992,
-0.032381944358348846,
0.13865694403648376,
-0.08692946285009384,
0.0790829285979271,
-0.055491261184215546,
-0.1101168543100357,
0.02726965956389904,
0.11307073384523392,
-0.266848623752594,
0.23194316029548645,
0.005795670207589865,
-0.06619521975517273,
-0.07798527181148529,
-0.02415461093187332,
0.04554189369082451,
0.19904311001300812,
0.074777752161026,
-0.0025721739511936903,
-0.0985921323299408,
-0.1604350209236145,
-0.0035795620642602444,
0.003904567565768957,
0.12411849945783615,
-0.031422633677721024,
-0.008662420324981213,
-0.048047564923763275,
-0.02887197583913803,
-0.036892786622047424,
-0.058628011494874954,
0.046003229916095734,
-0.17000669240951538,
0.05320185050368309,
0.03264983743429184,
0.053446345031261444,
0.021807091310620308,
-0.03694950044155121,
-0.10292581468820572,
0.20319914817810059,
-0.07320132106542587,
-0.07115907967090607,
-0.11742512881755829,
-0.0709628164768219,
0.02500193938612938,
-0.09203493595123291,
0.0746789500117302,
-0.09320281445980072,
0.01885121501982212,
-0.06376928836107254,
-0.19728536903858185,
0.12246022373437881,
-0.09933560341596603,
-0.025960171595215797,
-0.05696910247206688,
0.1400504857301712,
-0.08192209899425507,
0.008517257869243622,
0.03329753130674362,
0.0311837550252676,
-0.101662777364254,
-0.07855576276779175,
-0.006188572850078344,
0.003921836148947477,
0.04948630556464195,
0.07072073966264725,
-0.08801282197237015,
-0.08284960687160492,
-0.038000740110874176,
-0.000636666314676404,
0.2934253215789795,
0.15669852495193481,
-0.06931452453136444,
0.1525820940732956,
0.12009288370609283,
-0.07215527445077896,
-0.3365217447280884,
-0.08695725351572037,
-0.11067883670330048,
-0.03977072983980179,
-0.046555936336517334,
-0.1571168452501297,
0.11588511615991592,
-0.004439007956534624,
-0.020042624324560165,
0.07348743081092834,
-0.15523993968963623,
-0.08477700501680374,
0.19448399543762207,
-0.03307865187525749,
0.4068463146686554,
-0.11054953187704086,
-0.0950624942779541,
-0.07110202312469482,
-0.11997730284929276,
0.11309017986059189,
-0.012477992102503777,
0.09084546566009521,
-0.01921740733087063,
0.05707107484340668,
0.04382467269897461,
-0.038832247257232666,
0.09420378506183624,
0.010473021306097507,
0.019014105200767517,
-0.1254121959209442,
-0.10295052826404572,
0.04397515580058098,
-0.01995539292693138,
-0.017540106549859047,
-0.004275078419595957,
0.018242914229631424,
-0.16359899938106537,
-0.04212464764714241,
-0.0764581710100174,
0.05086830258369446,
0.042068950831890106,
-0.029563607648015022,
-0.007691054604947567,
-0.01661510579288006,
-0.006875237450003624,
-0.00004427770909387618,
0.25804147124290466,
-0.058748915791511536,
0.17537914216518402,
0.08201849460601807,
0.1287056803703308,
-0.1240130364894867,
0.03093533031642437,
-0.0719410628080368,
-0.06300289183855057,
0.056417182087898254,
-0.08991438895463943,
0.06572704762220383,
0.12093320488929749,
-0.05827133730053902,
0.05707010254263878,
0.11047960072755814,
0.05665024742484093,
-0.01672348380088806,
0.17151936888694763,
-0.21784354746341705,
0.039013735949993134,
-0.044656746089458466,
-0.028644291684031487,
0.0673145204782486,
0.06059737876057625,
0.13576023280620575,
0.056735020130872726,
-0.050879377871751785,
0.0027266712859272957,
-0.005394025705754757,
-0.006150494329631329,
0.053126439452171326,
0.06565799564123154,
0.03482401371002197,
-0.13315033912658691,
0.05063670501112938,
0.0488823726773262,
-0.1517987698316574,
-0.012782548554241657,
0.14544247090816498,
-0.16107147932052612,
-0.12472042441368103,
-0.008331404998898506,
0.12478770315647125,
-0.14226394891738892,
-0.04139966145157814,
-0.07949703186750412,
-0.13279509544372559,
0.07259250432252884,
0.21434538066387177,
0.12083859741687775,
0.07850685715675354,
-0.042793743312358856,
-0.04786950722336769,
-0.0023539469111710787,
-0.012482158839702606,
0.01542683970183134,
0.025215521454811096,
-0.1102122887969017,
0.020501524209976196,
-0.018913518637418747,
0.15162764489650726,
-0.09223692864179611,
-0.07566172629594803,
-0.17453664541244507,
0.05083347484469414,
-0.09629858285188675,
-0.03000663034617901,
-0.07866353541612625,
-0.022550096735358238,
-0.0028236843645572662,
-0.044283416122198105,
-0.0488961786031723,
-0.06393264979124069,
-0.12171278893947601,
0.03886346146464348,
-0.029917839914560318,
0.037657231092453,
-0.06475012004375458,
-0.04363161697983742,
0.10111245512962341,
-0.03200976178050041,
0.10144995152950287,
0.11017206311225891,
-0.0850227028131485,
0.09229160100221634,
-0.14469486474990845,
-0.11688804626464844,
0.12373711168766022,
0.01293914020061493,
0.06899435073137283,
0.06232675164937973,
0.043563924729824066,
0.06726560741662979,
0.018487147986888885,
0.07367441058158875,
0.053072553128004074,
-0.1265084147453308,
0.06561767309904099,
-0.036320216953754425,
-0.18026919662952423,
-0.048072449862957,
-0.03972756490111351,
0.10033562034368515,
0.002716359216719866,
0.1519702523946762,
-0.0539398267865181,
0.10532274842262268,
-0.027349798008799553,
0.010220712050795555,
-0.02422005869448185,
-0.21375028789043427,
-0.06186143308877945,
-0.0875529870390892,
0.020722882822155952,
0.0073815686628222466,
0.252583771944046,
0.05999443680047989,
0.043384622782468796,
0.05349669232964516,
0.09076128900051117,
0.006726719904690981,
0.02686336264014244,
0.16989776492118835,
0.10625749826431274,
-0.054806992411613464,
-0.05736768618226051,
0.0645746961236,
0.02236839570105076,
-0.004619908984750509,
0.13472703099250793,
0.07847650349140167,
-0.003241969272494316,
0.07508169859647751,
-0.022816574200987816,
0.055117711424827576,
-0.09602399915456772,
-0.16602522134780884,
-0.03789302334189415,
0.06136997044086456,
0.015683189034461975,
0.02769125998020172,
0.11872325092554092,
-0.02512430213391781,
0.04980659484863281,
-0.05039367452263832,
-0.047553956508636475,
-0.190480038523674,
-0.08713943511247635,
-0.1058601662516594,
-0.09514910727739334,
0.01605161279439926,
-0.07684782892465591,
-0.004868034739047289,
0.08577817678451538,
0.04222368076443672,
-0.05062175542116165,
0.061918362975120544,
0.013334243558347225,
-0.06097227334976196,
0.08177100121974945,
-0.04358189180493355,
0.034621722996234894,
0.009182103909552097,
-0.021071596071124077,
-0.13983911275863647,
-0.020003166049718857,
-0.04568071663379669,
0.04369695112109184,
-0.05934172868728638,
0.01186833344399929,
-0.15804150700569153,
-0.1167970523238182,
-0.02196607179939747,
0.06443385779857635,
-0.043019507080316544,
0.130898118019104,
0.007071374449878931,
0.0023405568208545446,
0.051037952303886414,
0.2136451154947281,
-0.05474230647087097,
-0.0692630410194397,
-0.049248065799474716,
0.22245341539382935,
0.08240340650081635,
0.10381010919809341,
-0.019030237570405006,
-0.010240653529763222,
-0.07025635242462158,
0.32316410541534424,
0.29418379068374634,
-0.039947621524333954,
0.0507243387401104,
0.01590515859425068,
0.03608250990509987,
0.1671287715435028,
0.1311587393283844,
0.09011198580265045,
0.23601709306240082,
-0.06328054517507553,
-0.04345982149243355,
-0.01966611109673977,
-0.020104875788092613,
-0.12282752990722656,
0.0899764820933342,
0.055597834289073944,
-0.05273459479212761,
-0.06209932267665863,
0.11512468755245209,
-0.2119985967874527,
0.14972861111164093,
0.014138193801045418,
-0.21562917530536652,
-0.07939939200878143,
-0.029313646256923676,
0.12147574126720428,
0.0022966889664530754,
0.08494418114423752,
-0.0009001587750390172,
-0.12122118473052979,
0.015007257461547852,
0.024496188387274742,
-0.21694234013557434,
-0.02909899316728115,
0.06782764196395874,
-0.03080557845532894,
-0.003602581797167659,
-0.033146586269140244,
0.021333782002329826,
0.08633527159690857,
0.06039969623088837,
-0.01826026290655136,
0.04458854719996452,
-0.0026062342803925276,
-0.034958090633153915,
0.02856798656284809,
0.025836622342467308,
-0.0018466500332579017,
-0.10822580754756927,
0.0704243928194046,
-0.1465485692024231,
0.05887974053621292,
-0.08138486742973328,
-0.04419925808906555,
-0.01359280850738287,
0.04496964067220688,
-0.06153792887926102,
0.05272408947348595,
0.10471534729003906,
-0.0008891951874829829,
-0.02254260517656803,
-0.05042839050292969,
-0.0399518720805645,
0.0006140783079899848,
-0.13396838307380676,
-0.14770886301994324,
-0.08994549512863159,
-0.0952795073390007,
0.11147317290306091,
0.0045949253253638744,
-0.17804133892059326,
-0.0027761408127844334,
-0.11016383022069931,
0.054672446101903915,
-0.16769617795944214,
0.0989537462592125,
0.04410397261381149,
0.011924069374799728,
-0.014897520653903484,
-0.04637180641293526,
0.04843989759683609,
0.07801199704408646,
-0.12207385152578354,
-0.09432519972324371
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-bert-base-cased
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-bert-base-cased')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-bert-base-cased')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-bert-base-cased')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
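Because this checkpoint is intended for sentence similarity, the pooled embeddings are typically compared with cosine similarity. A short illustrative continuation of the snippet above (reusing the `sentence_embeddings` tensor computed there):
```python
import torch.nn.functional as F

# Compare the two pooled embeddings from the snippet above with cosine similarity
normalized = F.normalize(sentence_embeddings, p=2, dim=1)
cosine_scores = normalized @ normalized.T
print("Cosine similarity between the two sentences:", cosine_scores[0, 1].item())
```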
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-bert-base-cased)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
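A minimal reproduction sketch of this configuration with the sentence-transformers `fit()` API is shown below. The base checkpoint (`bert-base-cased`) is inferred from the model name, and the training pairs are hypothetical placeholders; the actual training data is not described in this card:
```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

# Assumed starting checkpoint; placeholder pairs with similarity labels in [0, 1]
model = SentenceTransformer("bert-base-cased")
train_examples = [
    InputExample(texts=["A man is eating food.", "A man is eating a meal."], label=0.9),
    InputExample(texts=["A man is eating food.", "A plane is taking off."], label=0.1),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=16)
train_loss = losses.CosineSimilarityLoss(model)

# Parameters mirror the fit() configuration listed above
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=10,
    scheduler="WarmupLinear",
    warmup_steps=100,
    optimizer_params={"lr": 2e-05},
    weight_decay=0.01,
    max_grad_norm=1,
)
```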
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-bert-base-cased
|
[
"sentence-transformers",
"pytorch",
"bert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-bert-base-cased
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-bert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-bert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
42,
61,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-bert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.03772503882646561,
0.12598244845867157,
-0.0066234213300049305,
0.046048808842897415,
0.10814414918422699,
0.024636615067720413,
0.1343611478805542,
0.08604057878255844,
-0.001536516472697258,
0.08503501862287521,
0.013298631645739079,
0.10475227236747742,
0.0018099196022376418,
0.010961240157485008,
0.032635658979415894,
-0.27608048915863037,
0.02536492422223091,
-0.03841227665543556,
0.026021670550107956,
0.07200067490339279,
0.11499805748462677,
-0.07657081633806229,
0.06005631387233734,
0.01755552552640438,
-0.03823407366871834,
0.01639525406062603,
-0.028831247240304947,
-0.027044568210840225,
0.08838234096765518,
0.06280496716499329,
0.044551655650138855,
0.009455117397010326,
0.011792926117777824,
-0.20464448630809784,
0.011956833302974701,
0.06736936420202255,
-0.014080541208386421,
0.0643467828631401,
0.032578032463788986,
-0.04471561685204506,
0.1632092297077179,
-0.06949388980865479,
0.06528099626302719,
0.05076929181814194,
-0.11813030391931534,
-0.06443328410387039,
-0.042930711060762405,
-0.01116256508976221,
0.13358721137046814,
0.09494749456644058,
-0.06497763842344284,
0.10404817014932632,
-0.048843417316675186,
0.07798060029745102,
0.09482400119304657,
-0.2762231230735779,
-0.03159380331635475,
0.013560065999627113,
0.06441404670476913,
0.02642839401960373,
-0.10418365895748138,
0.01137364562600851,
-0.017106566578149796,
0.0320236012339592,
0.07380436360836029,
-0.04617137089371681,
0.07356592267751694,
-0.002238587476313114,
-0.11532622575759888,
0.007523376494646072,
0.17029598355293274,
0.03582018241286278,
-0.02147730067372322,
-0.19035257399082184,
-0.07209594547748566,
0.07596458494663239,
-0.05017349496483803,
-0.03742578998208046,
0.030625684186816216,
0.04801411181688309,
-0.013485637493431568,
-0.09385023266077042,
-0.10374079644680023,
-0.01539827510714531,
-0.059427570551633835,
0.014446848072111607,
-0.01711542345583439,
-0.05759190395474434,
0.0108401570469141,
0.06981959939002991,
-0.08749110996723175,
-0.11294527351856232,
-0.024247819557785988,
-0.02492516115307808,
-0.11906987428665161,
-0.03509792312979698,
-0.06072669103741646,
-0.0918036550283432,
0.034848880022764206,
0.1510319709777832,
0.09705793857574463,
0.014424819499254227,
-0.015213808976113796,
0.05534680560231209,
0.029537498950958252,
0.18878169357776642,
-0.05133486166596413,
-0.08299403637647629,
-0.04812334105372429,
0.03369273990392685,
-0.005467051174491644,
-0.021703261882066727,
-0.045621421188116074,
-0.00522862421348691,
0.03573933616280556,
0.05629928410053253,
0.0548485666513443,
0.05714092403650284,
-0.05353591963648796,
-0.04314674064517021,
0.05472922697663307,
-0.11756493896245956,
0.031967759132385254,
0.009621265344321728,
-0.05385559797286987,
0.016460897400975227,
0.07921139895915985,
-0.010420477017760277,
-0.069391168653965,
0.022588474676012993,
-0.10837601125240326,
-0.013242434710264206,
-0.0602496899664402,
-0.1335606724023819,
-0.010698366910219193,
-0.012181580066680908,
-0.031791724264621735,
-0.09818510711193085,
-0.1258719563484192,
-0.08068297803401947,
0.029219258576631546,
-0.04510601609945297,
-0.01096793171018362,
-0.12934142351150513,
-0.005743939429521561,
0.007627492770552635,
-0.008536070585250854,
-0.06131959706544876,
-0.004811887629330158,
0.01656196638941765,
-0.0473521426320076,
0.050436001271009445,
0.05509370192885399,
0.04050501808524132,
-0.12703381478786469,
0.026460962370038033,
-0.13077394664287567,
0.162642240524292,
-0.040530938655138016,
0.06340563297271729,
-0.14776159822940826,
0.026446916162967682,
0.017854886129498482,
0.06769468635320663,
0.01102349441498518,
0.14829538762569427,
-0.21995297074317932,
-0.07960482686758041,
0.11712450534105301,
-0.042677540332078934,
-0.08850152790546417,
0.10314709693193436,
-0.03530334681272507,
0.13740266859531403,
0.11536628752946854,
0.12426037341356277,
0.10741879045963287,
-0.05318400636315346,
-0.010625147260725498,
0.02475220151245594,
-0.052232775837183,
0.1425044685602188,
0.04328817501664162,
-0.06850409507751465,
0.0846603587269783,
-0.0026627983897924423,
-0.06965886056423187,
0.0014822736848145723,
-0.0013334109680727124,
-0.05682606250047684,
0.021428145468235016,
-0.037918418645858765,
0.06386665999889374,
-0.04062531888484955,
-0.0025295866653323174,
0.017401807010173798,
-0.11221134662628174,
0.11708450317382812,
0.06669654697179794,
-0.07517772167921066,
0.012898704037070274,
-0.09135281294584274,
0.018633520230650902,
-0.015764711424708366,
0.01399304997175932,
-0.20508384704589844,
-0.11248951405286789,
0.023441502824425697,
0.017204096540808678,
0.12422545254230499,
0.05090603604912758,
0.05880645662546158,
0.03680310398340225,
-0.015388832427561283,
-0.02431998960673809,
0.04351343959569931,
-0.0019428347004577518,
-0.1001700758934021,
-0.11684907227754593,
-0.0022895887959748507,
-0.03311418369412422,
0.09169336408376694,
-0.1216902956366539,
0.020385049283504486,
0.016025247052311897,
0.06539376080036163,
0.03977353870868683,
-0.02303566411137581,
-0.011066583916544914,
-0.028543397784233093,
-0.012569453567266464,
-0.0241226963698864,
0.05317379906773567,
0.01639864221215248,
-0.15717457234859467,
0.09726221114397049,
-0.20767317712306976,
-0.13625063002109528,
0.07217266410589218,
-0.002564209746196866,
-0.05909765139222145,
-0.06801404803991318,
-0.020496036857366562,
0.0011190995573997498,
-0.022321097552776337,
-0.06954246014356613,
0.21627376973628998,
0.08308696746826172,
0.10819811373949051,
-0.03569997474551201,
-0.026213139295578003,
-0.05324065685272217,
-0.03738046810030937,
-0.05127965658903122,
0.11019237339496613,
-0.030528586357831955,
-0.14358295500278473,
0.044698938727378845,
0.08481676131486893,
-0.05600511655211449,
0.1051529124379158,
-0.007421290967613459,
-0.07320824265480042,
-0.06719812750816345,
0.02036716230213642,
0.033610884100198746,
-0.004585471469908953,
-0.07908733934164047,
0.00877476017922163,
0.05631142854690552,
0.011807771399617195,
0.02217777632176876,
-0.05794379487633705,
0.05333706736564636,
0.062059495598077774,
0.0052781119011342525,
0.10096340626478195,
0.022041015326976776,
-0.006420413497835398,
0.06241115182638168,
0.016600746661424637,
0.011036772280931473,
-0.05141471326351166,
-0.04169527441263199,
-0.11422976106405258,
0.1683872938156128,
-0.1279905140399933,
-0.20008769631385803,
-0.15585674345493317,
0.0022774147801101208,
-0.04430598393082619,
0.030428418889641762,
0.07681228220462799,
-0.0619259849190712,
-0.06578696519136429,
-0.06917276978492737,
0.07477545738220215,
0.0760483369231224,
-0.03786357492208481,
-0.008477591909468174,
0.030666187405586243,
0.022912319749593735,
-0.12240619957447052,
-0.013231754302978516,
-0.0014518413227051497,
-0.07699135690927505,
-0.01312995608896017,
-0.02274751476943493,
0.0682053416967392,
0.12496909499168396,
0.06392847001552582,
-0.012577073648571968,
-0.009508871473371983,
0.23214811086654663,
-0.09068196266889572,
0.05592300370335579,
0.16143149137496948,
-0.012172475457191467,
0.06182381883263588,
0.09652166068553925,
0.019912464544177055,
-0.05699378252029419,
0.04933541640639305,
0.07295376807451248,
-0.014024817384779453,
-0.13438932597637177,
-0.10872730612754822,
-0.05680723488330841,
-0.0027223767247051,
0.13135084509849548,
0.03466542810201645,
0.028489310294389725,
0.045198142528533936,
-0.019018001854419708,
0.0031974276062101126,
0.09681369364261627,
0.1134105920791626,
0.11580076068639755,
-0.02964041568338871,
0.10719761997461319,
-0.03665866702795029,
-0.07470729202032089,
0.05530213564634323,
-0.0062294602394104,
0.14215709269046783,
0.026720445603132248,
0.16075488924980164,
0.07598835229873657,
-0.03888983279466629,
-0.022612404078245163,
0.07433325052261353,
-0.041160181164741516,
0.015800634399056435,
-0.03818924352526665,
-0.09586186707019806,
-0.00899593997746706,
0.09378495067358017,
0.09707038849592209,
-0.0390852615237236,
-0.043598685413599014,
0.050864383578300476,
0.12765385210514069,
0.12005040049552917,
0.0843355655670166,
-0.23572993278503418,
-0.0446963794529438,
0.039111774414777756,
-0.07408662140369415,
-0.06354546546936035,
-0.009062770754098892,
0.03958951681852341,
-0.10228529572486877,
0.03373434394598007,
-0.008607360534369946,
0.09998302906751633,
-0.0913204625248909,
0.03372959792613983,
-0.05797487497329712,
0.03974725306034088,
-0.000513911247253418,
0.07314398139715195,
-0.22243934869766235,
0.09815966337919235,
0.04004461690783501,
0.05295161157846451,
-0.05331306904554367,
0.021781429648399353,
0.0652901828289032,
0.01421173196285963,
0.17518948018550873,
-0.034428730607032776,
-0.002574097365140915,
-0.01614837534725666,
-0.06879452615976334,
-0.005716637242585421,
0.05118584260344505,
-0.1348205953836441,
0.0944974347949028,
-0.05054632946848869,
-0.028534187003970146,
-0.013668579049408436,
0.03423140570521355,
-0.04546302184462547,
-0.16763117909431458,
0.008691813796758652,
0.01115684024989605,
0.005244391970336437,
-0.01966906525194645,
-0.004094043280929327,
0.016878187656402588,
0.19670462608337402,
-0.12294566631317139,
-0.053821031004190445,
-0.1243695467710495,
-0.009597176685929298,
0.10994023084640503,
-0.09132115542888641,
0.003779431339353323,
0.004546329379081726,
0.15458498895168304,
-0.05406368151307106,
-0.06511551141738892,
0.07213881611824036,
-0.04948209226131439,
-0.055562831461429596,
-0.052735038101673126,
0.1048090010881424,
0.06470229476690292,
0.05928615853190422,
0.044368941336870193,
0.07533346861600876,
-0.030617475509643555,
-0.087436243891716,
-0.06597460806369781,
0.1066797599196434,
-0.009708334691822529,
0.05969247594475746,
-0.13273824751377106,
-0.026032915338873863,
-0.1143055260181427,
0.05504646524786949,
0.18509231507778168,
0.21578115224838257,
-0.06996023654937744,
0.060379475355148315,
0.14561505615711212,
-0.10359517484903336,
-0.22254981100559235,
-0.07087556272745132,
0.033342961221933365,
0.04678778350353241,
0.07175523787736893,
-0.1364867091178894,
0.0684862732887268,
0.05082311853766441,
-0.007150184828788042,
-0.07348576933145523,
-0.23598551750183105,
-0.14677293598651886,
0.1414554864168167,
0.00660668034106493,
-0.05357266962528229,
-0.0898231491446495,
-0.06461390852928162,
-0.07481628656387329,
-0.0159931518137455,
0.0809200331568718,
-0.0512656532227993,
0.10186713933944702,
0.0577494241297245,
0.031271208077669144,
0.06316924095153809,
0.010705435648560524,
0.13942180573940277,
0.07410094887018204,
0.03344540297985077,
-0.035945113748311996,
-0.038815565407276154,
0.06213869899511337,
-0.08343194425106049,
0.14434272050857544,
-0.061692316085100174,
0.035161588340997696,
-0.1253897249698639,
-0.03514496982097626,
-0.037156298756599426,
0.02539185620844364,
-0.04802205041050911,
-0.05595361441373825,
-0.014960491098463535,
0.04752124100923538,
0.09012128412723541,
-0.005166334565728903,
0.0015760920941829681,
-0.09273644536733627,
0.020824868232011795,
0.1498476266860962,
0.13246074318885803,
0.06078652665019035,
-0.16737522184848785,
0.02805217169225216,
0.008808367885649204,
0.060246337205171585,
-0.12079112976789474,
0.074700728058815,
0.08000602573156357,
-0.012101436965167522,
0.1585419774055481,
0.015014794655144215,
-0.08658517152070999,
-0.01673717424273491,
0.045911598950624466,
-0.07519980520009995,
-0.15726324915885925,
-0.04623405262827873,
-0.010975593701004982,
-0.11712955683469772,
-0.05637853220105171,
0.15786413848400116,
-0.008655181154608727,
0.003535617608577013,
0.042280565947294235,
0.03856121003627777,
-0.032638367265462875,
0.11717735230922699,
-0.015965651720762253,
0.05355555936694145,
-0.04707135260105133,
0.08182555437088013,
0.08546677231788635,
-0.08917039632797241,
0.03130577877163887,
0.10705950111150742,
-0.06564288586378098,
-0.09162184596061707,
-0.06540330499410629,
0.14140300452709198,
-0.08740628510713577,
0.03261477127671242,
-0.056660547852516174,
-0.06379777193069458,
0.020122261717915535,
0.0458524189889431,
0.051966555416584015,
0.059564653784036636,
-0.09889174997806549,
-0.012971789576113224,
-0.08309607952833176,
0.07796970754861832,
0.09112365543842316,
0.014935722574591637,
-0.023762300610542297,
0.08064565807580948,
-0.025694025680422783,
0.0029210401698946953,
-0.028503185138106346,
-0.04372620955109596,
-0.06798148155212402,
-0.009068344719707966,
-0.029965465888381004,
-0.015229800716042519,
-0.08820739388465881,
-0.01111091859638691,
0.03361465781927109,
0.03966427221894264,
-0.003674442647024989,
-0.0176815427839756,
-0.0495995469391346,
-0.07009893655776978,
-0.0463111437857151,
0.10495766252279282,
-0.1373443901538849,
-0.014271048828959465,
0.04439915344119072,
-0.0982125923037529,
0.08229158818721771,
-0.01433976087719202,
-0.02356363646686077,
0.035395924001932144,
-0.029131291434168816,
-0.037533245980739594,
0.02719660848379135,
0.03438446298241615,
0.05881146341562271,
-0.10213027149438858,
0.012448025867342949,
-0.057118434458971024,
0.028246331959962845,
0.0027476181276142597,
0.04335859790444374,
-0.09403448551893234,
0.011137901805341244,
-0.012696562334895134,
-0.027141865342855453,
-0.1015777587890625,
0.029092777520418167,
0.02991684153676033,
0.04202159866690636,
0.16443340480327606,
-0.05648907274007797,
0.07147444784641266,
-0.13263893127441406,
0.01306027453392744,
0.010018080472946167,
-0.052278902381658554,
0.10100308060646057,
-0.12304182350635529,
0.055431291460990906,
-0.04595338925719261,
0.05950842797756195,
-0.01455727033317089,
0.06041077896952629,
0.06507710367441177,
0.04488855600357056,
0.004056126810610294,
0.027219394221901894,
0.06569036841392517,
0.051629915833473206,
-0.003906938247382641,
-0.06592190265655518,
0.03476240485906601,
0.01228928379714489,
-0.018298201262950897,
0.03778495267033577,
0.08134738355875015,
0.03989574313163757,
0.0906066820025444,
0.08365658670663834,
-0.0031939626205712557,
-0.09123226255178452,
0.03190746530890465,
-0.02737756446003914,
0.06258760392665863,
-0.03755223751068115,
0.0786748006939888,
0.15199817717075348,
-0.15829843282699585,
0.11626911163330078,
0.013739531859755516,
-0.06542379409074783,
-0.07665670663118362,
-0.14181753993034363,
-0.07247931510210037,
-0.03328527882695198,
-0.01674661599099636,
-0.12186864763498306,
-0.013424145989120007,
-0.02614157274365425,
0.00334538915194571,
-0.007882828824222088,
0.13412459194660187,
-0.11475887894630432,
-0.09846262633800507,
0.09091754257678986,
-0.021719034761190414,
0.045092109590768814,
0.005737700033932924,
0.029435625299811363,
0.02024209126830101,
0.07987500727176666,
0.044287800788879395,
0.041086189448833466,
0.04282832890748978,
0.02516225539147854,
-0.08216580748558044,
-0.08248285949230194,
-0.005810101050883532,
-0.012219682335853577,
-0.04927933216094971,
0.07632219046354294,
0.02994392439723015,
-0.08155081421136856,
-0.013185940682888031,
0.22937020659446716,
-0.09401141852140427,
-0.09363920986652374,
-0.17595061659812927,
0.19314850866794586,
0.036378975957632065,
0.04497390612959862,
-0.0299260001629591,
-0.09196369349956512,
-0.01340609509497881,
0.14783841371536255,
0.16018207371234894,
-0.0893869623541832,
0.0133064491674304,
0.03357003256678581,
0.01996725983917713,
0.008549519814550877,
0.013752734288573265,
0.04788649082183838,
0.1939375400543213,
-0.045544564723968506,
0.09062780439853668,
-0.011813459917902946,
-0.057642675936222076,
-0.05604042485356331,
0.10164422541856766,
0.030136561021208763,
0.03462914004921913,
-0.01870516687631607,
0.11279002577066422,
-0.029573673382401466,
-0.1008719801902771,
-0.043402187526226044,
-0.07977273315191269,
-0.11735724657773972,
-0.03047374077141285,
0.026653949171304703,
0.024980448186397552,
0.10242684185504913,
0.03009762056171894,
-0.03350081667304039,
0.12246720492839813,
-0.011173184961080551,
-0.05508757382631302,
-0.025609470903873444,
0.021293699741363525,
-0.03016386181116104,
0.16852036118507385,
-0.0033973848912864923,
-0.02670852467417717,
0.1256837099790573,
0.00935950968414545,
-0.05831353738903999,
0.08080871403217316,
0.043434035032987595,
-0.07407184690237045,
0.12926776707172394,
0.07426963001489639,
-0.015523611567914486,
0.0886397510766983,
0.07697553187608719,
-0.2007860541343689,
0.06804607808589935,
-0.05221209302544594,
-0.04606632888317108,
-0.06492922455072403,
0.0597345270216465,
-0.08250849694013596,
0.12234371155500412,
0.1721678078174591,
-0.020902104675769806,
-0.01109527051448822,
-0.008212617598474026,
-0.0016960325883701444,
0.026532938703894615,
0.06897111237049103,
-0.04521743953227997,
-0.08671457320451736,
0.00005761437932960689,
0.0011726751690730453,
0.009113052859902382,
-0.2899332046508789,
-0.11605095118284225,
0.02239571325480938,
-0.013238335959613323,
-0.02072821743786335,
0.1282077133655548,
0.07933533936738968,
-0.0037591406144201756,
-0.03389783948659897,
-0.2120209038257599,
0.03504395857453346,
0.1039443239569664,
-0.1227288767695427,
-0.08051982522010803
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-bert-hinglish-big
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-bert-hinglish-big')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-bert-hinglish-big')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-bert-hinglish-big')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-bert-hinglish-big)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
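With 4617 batches of 32 pairs, one epoch covers 4617 × 32 = 147,744 sentence pairs (assuming full batches), which matches the per-epoch total of the base-cased card above (9234 × 16).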
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-bert-hinglish-big
|
[
"sentence-transformers",
"pytorch",
"bert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-bert-hinglish-big
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-bert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-bert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
42,
61,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-bert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.03669226914644241,
0.10223659127950668,
-0.006723048631101847,
0.04737059399485588,
0.10860154777765274,
0.022636178880929947,
0.13038848340511322,
0.08889977633953094,
-0.003652328159660101,
0.0876670777797699,
0.01700890250504017,
0.09252256155014038,
0.008017050102353096,
0.011244688183069229,
0.03388363495469093,
-0.2845139503479004,
0.015176841989159584,
-0.038832202553749084,
0.018368598073720932,
0.06999921053647995,
0.12231907993555069,
-0.08191300928592682,
0.06114557757973671,
0.02232366055250168,
-0.034727636724710464,
0.013507586903870106,
-0.03315579891204834,
-0.028293954208493233,
0.08506662398576736,
0.0662076473236084,
0.03593570366501808,
0.006301478948444128,
0.005495523102581501,
-0.18593844771385193,
0.01319975033402443,
0.0613197460770607,
-0.0082715367898345,
0.06538830697536469,
0.037814170122146606,
-0.03554728627204895,
0.15502992272377014,
-0.08438215404748917,
0.060012560337781906,
0.05498691275715828,
-0.10952852666378021,
-0.07649681717157364,
-0.04447130486369133,
-0.01233877893537283,
0.12381856888532639,
0.09852433204650879,
-0.06391996145248413,
0.10656514018774033,
-0.06081702932715416,
0.07681666314601898,
0.10160614550113678,
-0.2726735472679138,
-0.031287405639886856,
0.01946423575282097,
0.06947899609804153,
0.03273599594831467,
-0.10703418403863907,
0.003716452978551388,
-0.018986495211720467,
0.03820522874593735,
0.07116410881280899,
-0.03765017166733742,
0.09157145023345947,
-0.00156122213229537,
-0.1141153872013092,
0.013581030070781708,
0.16829966008663177,
0.036949846893548965,
-0.018955878913402557,
-0.20337890088558197,
-0.06813984364271164,
0.06349621713161469,
-0.05605003610253334,
-0.04137546569108963,
0.03509030491113663,
0.043158724904060364,
-0.010383824817836285,
-0.09750425815582275,
-0.10651832818984985,
-0.006922919303178787,
-0.07444532960653305,
0.00891647208482027,
-0.019184619188308716,
-0.056641437113285065,
-0.0006607507821172476,
0.0558912493288517,
-0.08513455837965012,
-0.11601825803518295,
-0.03422042354941368,
-0.025113984942436218,
-0.11222918331623077,
-0.04220353811979294,
-0.05934286117553711,
-0.08996249735355377,
0.03795104846358299,
0.13950955867767334,
0.08707258105278015,
0.009376686997711658,
-0.015916846692562103,
0.055327653884887695,
0.03227909281849861,
0.19107572734355927,
-0.05873210355639458,
-0.08165439963340759,
-0.042688023298978806,
0.030663345009088516,
-0.00251193274743855,
-0.014934414066374302,
-0.04849718138575554,
-0.011639927513897419,
0.028283048421144485,
0.05481299012899399,
0.05187651515007019,
0.04993594437837601,
-0.049230147153139114,
-0.04481659457087517,
0.057440727949142456,
-0.11582356691360474,
0.03702792152762413,
0.015264783054590225,
-0.06122239679098129,
0.03648439794778824,
0.061092112213373184,
-0.016282182186841965,
-0.06763109564781189,
0.028309395536780357,
-0.10586200654506683,
-0.01034968439489603,
-0.056547146290540695,
-0.13182008266448975,
-0.006873908918350935,
0.0010594766354188323,
-0.02814011089503765,
-0.09594639390707016,
-0.12432535737752914,
-0.07802657037973404,
0.021428018808364868,
-0.05462086200714111,
-0.011866787448525429,
-0.12114103883504868,
-0.011065686121582985,
0.009084063582122326,
-0.006894242018461227,
-0.05391580983996391,
-0.006102617830038071,
0.017295747995376587,
-0.047735828906297684,
0.057111822068691254,
0.054325710982084274,
0.0406549908220768,
-0.12696129083633423,
0.017810840159654617,
-0.1382228583097458,
0.16328318417072296,
-0.03234555199742317,
0.0674900934100151,
-0.14974690973758698,
0.01682891882956028,
0.012757739052176476,
0.06737557053565979,
0.005953226238489151,
0.1478927731513977,
-0.21671965718269348,
-0.0847398191690445,
0.11985249817371368,
-0.03553708642721176,
-0.09611359983682632,
0.11763616651296616,
-0.03134917467832565,
0.13514186441898346,
0.1146276444196701,
0.11799154430627823,
0.10506130754947662,
-0.042650993913412094,
-0.017379986122250557,
0.030427241697907448,
-0.0581473745405674,
0.1374555230140686,
0.05085379630327225,
-0.06419911235570908,
0.0912243202328682,
-0.005199234001338482,
-0.056743405759334564,
0.004513198044151068,
-0.00091077561955899,
-0.056225214153528214,
0.022545883432030678,
-0.03242049738764763,
0.058588217943906784,
-0.04295385628938675,
0.007022466976195574,
0.011433042585849762,
-0.10962408035993576,
0.10234016180038452,
0.07040780782699585,
-0.0635996162891388,
0.011057913303375244,
-0.09385120123624802,
0.019503651186823845,
-0.01787414401769638,
0.016112694516777992,
-0.19461387395858765,
-0.10147729516029358,
0.02234981581568718,
0.014864076860249043,
0.12041652202606201,
0.04980538785457611,
0.06713055819272995,
0.034453731030225754,
-0.015120993368327618,
-0.01876402273774147,
0.052879780530929565,
-0.010147247463464737,
-0.10108409076929092,
-0.1166776567697525,
-0.0006555322324857116,
-0.0326264463365078,
0.0936870202422142,
-0.12205727398395538,
0.015201774425804615,
0.029842659831047058,
0.056450191885232925,
0.04372617229819298,
-0.015923723578453064,
-0.0091221509501338,
-0.03252594918012619,
-0.013473700731992722,
-0.022272996604442596,
0.049436070024967194,
0.012874139472842216,
-0.16050638258457184,
0.09607560932636261,
-0.2004329264163971,
-0.12482908368110657,
0.07731199264526367,
0.00829421728849411,
-0.05628850311040878,
-0.07321326434612274,
-0.019876185804605484,
0.001285012811422348,
-0.021258268505334854,
-0.07032988965511322,
0.20931196212768555,
0.08514430373907089,
0.10937023162841797,
-0.03658837452530861,
-0.026949018239974976,
-0.05130581185221672,
-0.03207985311746597,
-0.04815593734383583,
0.11500675231218338,
-0.038203269243240356,
-0.15224167704582214,
0.04893141984939575,
0.09074705094099045,
-0.05702013522386551,
0.11177356541156769,
-0.009247006848454475,
-0.07274926453828812,
-0.0673813596367836,
0.02456498332321644,
0.036661166697740555,
-0.012269243597984314,
-0.07919823378324509,
0.01742660626769066,
0.060275543481111526,
0.013384043239057064,
0.017299629747867584,
-0.047524385154247284,
0.05335168167948723,
0.06691332161426544,
0.005361413117498159,
0.10468616336584091,
0.02049063891172409,
0.0014458083314821124,
0.06604604423046112,
0.024808254092931747,
0.010154004208743572,
-0.04387371242046356,
-0.04029526934027672,
-0.10757859796285629,
0.16748128831386566,
-0.12338656932115555,
-0.19774247705936432,
-0.15513582527637482,
0.0021057859994471073,
-0.04633818566799164,
0.026213757693767548,
0.07659382373094559,
-0.07060956209897995,
-0.0645151361823082,
-0.07114068418741226,
0.08513371646404266,
0.07861214876174927,
-0.036760807037353516,
-0.0047952476888895035,
0.03183466196060181,
0.01788615621626377,
-0.12405741214752197,
-0.013817141763865948,
-0.0007897092727944255,
-0.08212453126907349,
-0.01112599577754736,
-0.016672596335411072,
0.05563933029770851,
0.11618834733963013,
0.06549306213855743,
-0.011141476221382618,
-0.01167371217161417,
0.2305564135313034,
-0.09662014245986938,
0.05444052442908287,
0.16763347387313843,
0.001143797766417265,
0.0573035292327404,
0.09294179081916809,
0.025243375450372696,
-0.05087549611926079,
0.04563148319721222,
0.06582775712013245,
-0.01265635248273611,
-0.13449248671531677,
-0.11275143176317215,
-0.06132006645202637,
0.006380492355674505,
0.12753726541996002,
0.03775249794125557,
0.019817769527435303,
0.04715920239686966,
-0.020700212568044662,
0.004407453816384077,
0.09330149739980698,
0.1082959920167923,
0.11015160381793976,
-0.0240373183041811,
0.10303746163845062,
-0.03785255178809166,
-0.07859848439693451,
0.06411568820476532,
-0.013387909159064293,
0.13188426196575165,
0.022693641483783722,
0.1821960061788559,
0.0638837143778801,
-0.04438386112451553,
-0.019727276638150215,
0.07867008447647095,
-0.04475514963269234,
0.0031054832506924868,
-0.037843573838472366,
-0.0978129655122757,
-0.0015690047293901443,
0.09481048583984375,
0.10691070556640625,
-0.029382459819316864,
-0.04563688114285469,
0.05451873689889908,
0.12989576160907745,
0.12581002712249756,
0.08155886083841324,
-0.24317707121372223,
-0.04927278310060501,
0.038537897169589996,
-0.07388126105070114,
-0.06086568906903267,
-0.007044483441859484,
0.043329257518053055,
-0.10362312197685242,
0.036232247948646545,
-0.0055285049602389336,
0.09549951553344727,
-0.0844932422041893,
0.03794785588979721,
-0.0643065944314003,
0.048220716416835785,
0.0004592867335304618,
0.07260628044605255,
-0.23096875846385956,
0.08495455980300903,
0.039674311876297,
0.049450039863586426,
-0.06511983275413513,
0.020936574786901474,
0.06973345577716827,
0.004009174648672342,
0.175594300031662,
-0.03600364550948143,
0.008063076995313168,
-0.01626104861497879,
-0.07039035856723785,
-0.0060327076353132725,
0.0494321808218956,
-0.13167785108089447,
0.0928933322429657,
-0.05179566144943237,
-0.02988305315375328,
-0.014556797221302986,
0.037104781717061996,
-0.03018389828503132,
-0.1691587120294571,
0.004914477467536926,
0.013539894483983517,
0.0014460613019764423,
-0.020050745457410812,
-0.006121523678302765,
0.01204937044531107,
0.1962655484676361,
-0.12479472905397415,
-0.05498000606894493,
-0.12308506667613983,
-0.0025545447133481503,
0.10987996309995651,
-0.09323938190937042,
0.0013131432933732867,
0.008082501590251923,
0.15845881402492523,
-0.05034475773572922,
-0.0629231184720993,
0.07767710834741592,
-0.05168193578720093,
-0.0605856291949749,
-0.04564584046602249,
0.10783682018518448,
0.05667181685566902,
0.06753052026033401,
0.041921216994524,
0.07060199975967407,
-0.036557529121637344,
-0.08573313057422638,
-0.06613039970397949,
0.10464607179164886,
-0.012190166860818863,
0.06824386119842529,
-0.1237034723162651,
-0.0210421085357666,
-0.10365048050880432,
0.052696067839860916,
0.2090374082326889,
0.22484081983566284,
-0.07053710520267487,
0.06522563099861145,
0.14206230640411377,
-0.09120015054941177,
-0.22722753882408142,
-0.06674400717020035,
0.03589896857738495,
0.052668970078229904,
0.07860992848873138,
-0.13235031068325043,
0.07960949838161469,
0.06046443432569504,
-0.007797743193805218,
-0.0709625780582428,
-0.24635924398899078,
-0.14455214142799377,
0.1300484836101532,
-0.004551769234240055,
-0.044625476002693176,
-0.09166917204856873,
-0.06037438288331032,
-0.0759100615978241,
-0.0024381056427955627,
0.07949264347553253,
-0.05292929708957672,
0.104698047041893,
0.05302860960364342,
0.03194534778594971,
0.06463924050331116,
0.006793270818889141,
0.13837742805480957,
0.06124498322606087,
0.03006567992269993,
-0.04094050079584122,
-0.03358457237482071,
0.06491341441869736,
-0.08167169243097305,
0.14810097217559814,
-0.06740877032279968,
0.02710081823170185,
-0.12269200384616852,
-0.036119673401117325,
-0.042815178632736206,
0.02644263580441475,
-0.046246547251939774,
-0.054384659975767136,
-0.020273763686418533,
0.052094295620918274,
0.0891643613576889,
-0.007515530567616224,
0.01009269431233406,
-0.0839691311120987,
0.020509708672761917,
0.14672422409057617,
0.14417223632335663,
0.041582100093364716,
-0.1717749387025833,
0.031102009117603302,
0.012286475859582424,
0.058250684291124344,
-0.11236471682786942,
0.07784750312566757,
0.08034544438123703,
-0.012057903222739697,
0.1526341736316681,
0.0206623338162899,
-0.08114051818847656,
-0.013554367236793041,
0.048483140766620636,
-0.06376931071281433,
-0.18090017139911652,
-0.05000318959355354,
-0.0055807800963521,
-0.12265394628047943,
-0.06078236177563667,
0.1561710238456726,
-0.005435329861938953,
0.003959189169108868,
0.0428226999938488,
0.04375562444329262,
-0.032457076013088226,
0.11897984892129898,
-0.01761980541050434,
0.05577089264988899,
-0.053413987159729004,
0.07257109880447388,
0.09126737713813782,
-0.0952591598033905,
0.03285093605518341,
0.1227218359708786,
-0.05785917863249779,
-0.09488936513662338,
-0.05929384008049965,
0.13069839775562286,
-0.07882314920425415,
0.0384368859231472,
-0.053541943430900574,
-0.07594520598649979,
0.022975588217377663,
0.02637646347284317,
0.05161284655332565,
0.054344017058610916,
-0.09775276482105255,
-0.011159754358232021,
-0.09128445386886597,
0.0811818391084671,
0.08054855465888977,
0.015691138803958893,
-0.027898192405700684,
0.08049505949020386,
-0.03358554095029831,
0.006112545263022184,
-0.029247906059026718,
-0.038813695311546326,
-0.07000839710235596,
-0.009794747456908226,
-0.035039860755205154,
-0.013930044136941433,
-0.09153532236814499,
-0.010068132542073727,
0.02906842529773712,
0.04638582095503807,
-0.003793935524299741,
-0.017687639221549034,
-0.05236871540546417,
-0.07102685421705246,
-0.045931097120046616,
0.1103626936674118,
-0.13086457550525665,
-0.00916057638823986,
0.03936231881380081,
-0.09554708003997803,
0.09277331084012985,
-0.01724308356642723,
-0.025632651522755623,
0.022843364626169205,
-0.023729586973786354,
-0.05169966444373131,
0.028182998299598694,
0.03349154815077782,
0.06441576033830643,
-0.10666608065366745,
0.016323572024703026,
-0.05216441676020622,
0.02425570972263813,
0.006406809203326702,
0.039919644594192505,
-0.09236827492713928,
0.021228116005659103,
-0.022148799151182175,
-0.018385866656899452,
-0.1051003709435463,
0.024781018495559692,
0.018832750618457794,
0.03737100213766098,
0.16705363988876343,
-0.055825747549533844,
0.07108202576637268,
-0.1357416808605194,
0.01283977273851633,
0.002480271738022566,
-0.058011848479509354,
0.09559907019138336,
-0.11676985025405884,
0.05996200814843178,
-0.043144069612026215,
0.05766521394252777,
-0.01607976295053959,
0.04912766069173813,
0.06989666074514389,
0.04421902447938919,
0.010878168046474457,
0.021883806213736534,
0.06707654893398285,
0.05412980914115906,
-0.011313458904623985,
-0.0586882010102272,
0.03440559282898903,
0.02202496863901615,
-0.00484648859128356,
0.046848829835653305,
0.08609309792518616,
0.030636096373200417,
0.09009046107530594,
0.08575029671192169,
-0.009926741942763329,
-0.09209810942411423,
0.028378885239362717,
-0.02532009594142437,
0.06341516971588135,
-0.03769734129309654,
0.07250413298606873,
0.15929406881332397,
-0.1544582098722458,
0.11415175348520279,
0.004873804748058319,
-0.0604560561478138,
-0.07855921238660812,
-0.13584144413471222,
-0.07824411243200302,
-0.03279343619942665,
-0.020086204633116722,
-0.12215031683444977,
-0.013703543692827225,
-0.019123490899801254,
0.013949181884527206,
-0.012695166282355785,
0.13775880634784698,
-0.11418115347623825,
-0.10002145171165466,
0.09527212381362915,
-0.024188602343201637,
0.042942438274621964,
0.0063356622122228146,
0.029179155826568604,
0.01611248403787613,
0.07630684226751328,
0.04817968234419823,
0.046477653086185455,
0.038092996925115585,
0.0198103878647089,
-0.08744625747203827,
-0.0872979387640953,
-0.007499872241169214,
-0.0095033785328269,
-0.04824850335717201,
0.07299000769853592,
0.02616792917251587,
-0.0819982960820198,
-0.01629304513335228,
0.22580519318580627,
-0.09462068229913712,
-0.09377510100603104,
-0.16604478657245636,
0.18987150490283966,
0.04379185289144516,
0.041263047605752945,
-0.03372208774089813,
-0.09319868683815002,
-0.018758676946163177,
0.1442587822675705,
0.17342883348464966,
-0.09599509835243225,
0.013848011381924152,
0.033979129046201706,
0.01959294266998768,
0.00001979822809516918,
0.0198082122951746,
0.054074689745903015,
0.20731651782989502,
-0.04797951132059097,
0.10940159112215042,
-0.008615287020802498,
-0.05484269931912422,
-0.06531545519828796,
0.10509934276342392,
0.03500580042600632,
0.04247061535716057,
-0.010270779021084309,
0.10812114179134369,
-0.0350852869451046,
-0.09217569977045059,
-0.05007408931851387,
-0.08745323121547699,
-0.12293676286935806,
-0.03311358392238617,
0.030318964272737503,
0.03251641243696213,
0.10624942183494568,
0.03322258219122887,
-0.036807842552661896,
0.10010142624378204,
-0.014733034186065197,
-0.06008658558130264,
-0.01820952631533146,
0.027791054919362068,
-0.03441803902387619,
0.16904699802398682,
-0.00018417451065033674,
-0.029368117451667786,
0.13067391514778137,
0.005877455230802298,
-0.07130223512649536,
0.07098578661680222,
0.04160701856017113,
-0.07534189522266388,
0.13366340100765228,
0.07937116175889969,
-0.015231958590447903,
0.08222074806690216,
0.08242916315793991,
-0.20977817475795746,
0.061881016939878464,
-0.04768233746290207,
-0.03883296251296997,
-0.06754522025585175,
0.05975912883877754,
-0.0769708976149559,
0.1187037006020546,
0.17762763798236847,
-0.024150770157575607,
-0.014947667717933655,
-0.010421723127365112,
0.0023224602919071913,
0.031407639384269714,
0.07624389976263046,
-0.04894767329096794,
-0.09468607604503632,
0.0017345884116366506,
-0.0031110516283661127,
0.012984377332031727,
-0.2932187616825104,
-0.11771663278341293,
0.021181350573897362,
-0.008003270253539085,
-0.026085976511240005,
0.12207777798175812,
0.08767665922641754,
-0.0023387724068015814,
-0.030805256217718124,
-0.20167231559753418,
0.028186511248350143,
0.099129319190979,
-0.12727244198322296,
-0.07788670808076859
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-bert-hinglish-small
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-bert-hinglish-small')
embeddings = model.encode(sentences)
print(embeddings)
```
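The embeddings returned above can be compared with cosine similarity, which is how the clustering and semantic-search use cases mentioned earlier are typically built. A minimal sketch, with a made-up corpus and query purely for illustration:
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('aditeyabaral/sentencetransformer-bert-hinglish-small')

# Made-up corpus and query, only to show the search pattern
corpus = ["This is an example sentence", "Each sentence is converted"]
query = "An example sentence"

corpus_embeddings = model.encode(corpus, convert_to_tensor=True)
query_embedding = model.encode(query, convert_to_tensor=True)

# Rank corpus sentences by cosine similarity to the query
scores = util.cos_sim(query_embedding, corpus_embeddings)[0]
best = int(scores.argmax())
print(corpus[best], float(scores[best]))
```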
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-bert-hinglish-small')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-bert-hinglish-small')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-bert-hinglish-small)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: BertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-bert-hinglish-small
|
[
"sentence-transformers",
"pytorch",
"bert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-bert-hinglish-small
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-bert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-bert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
42,
62,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #bert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-bert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.04255929961800575,
0.11247279495000839,
-0.006484323646873236,
0.044859159737825394,
0.10971245169639587,
0.022095168009400368,
0.1344633400440216,
0.09001798927783966,
0.004209983628243208,
0.08227401971817017,
0.015548992902040482,
0.09878256916999817,
0.005663628689944744,
0.01119601633399725,
0.02903926372528076,
-0.289505273103714,
0.019564881920814514,
-0.03696504980325699,
0.03079400025308132,
0.07392791658639908,
0.11488751322031021,
-0.0801514983177185,
0.06277209520339966,
0.028856929391622543,
-0.04063872620463371,
0.014866640791296959,
-0.0272457804530859,
-0.03045395016670227,
0.08888815343379974,
0.06279709190130234,
0.03942035511136055,
0.012244999408721924,
0.01363371405750513,
-0.19706550240516663,
0.013172169215977192,
0.06483221054077148,
-0.00932267028838396,
0.06872503459453583,
0.039926767349243164,
-0.047613874077796936,
0.17915305495262146,
-0.06809663027524948,
0.05826426297426224,
0.05862383916974068,
-0.11252114176750183,
-0.07439326494932175,
-0.04113153740763664,
-0.015651913359761238,
0.13518428802490234,
0.10013242065906525,
-0.06506109237670898,
0.10931581258773804,
-0.059857841581106186,
0.07741113007068634,
0.09819992631673813,
-0.27510198950767517,
-0.030833452939987183,
0.02686687558889389,
0.056990571320056915,
0.024754198268055916,
-0.10361839085817337,
0.009845554828643799,
-0.01777738519012928,
0.033512476831674576,
0.07588586211204529,
-0.04431673139333725,
0.08213206380605698,
0.0019239892717450857,
-0.11518718302249908,
0.006430495996028185,
0.16204734146595,
0.036293890327215195,
-0.01814146526157856,
-0.19631631672382355,
-0.06923063844442368,
0.07208739966154099,
-0.06076490134000778,
-0.039870936423540115,
0.03186266869306564,
0.0427132174372673,
-0.015179631300270557,
-0.09246183186769485,
-0.10317099839448929,
-0.017698807641863823,
-0.0667603462934494,
0.013346996158361435,
-0.01325132790952921,
-0.0530560277402401,
0.0008040785905905068,
0.06139976903796196,
-0.09050597250461578,
-0.11243471503257751,
-0.02912646345794201,
-0.02442600578069687,
-0.11928697675466537,
-0.03722264990210533,
-0.0618150494992733,
-0.11663416028022766,
0.03576362133026123,
0.13415582478046417,
0.08592859655618668,
0.01169903576374054,
-0.008615751750767231,
0.05795610323548317,
0.028320131823420525,
0.18700246512889862,
-0.05625446140766144,
-0.08206821978092194,
-0.04739534482359886,
0.03494279831647873,
-0.001791905495338142,
-0.01718899980187416,
-0.04887295514345169,
-0.007382252253592014,
0.028591059148311615,
0.05398853123188019,
0.05607827752828598,
0.05748780071735382,
-0.05246015638113022,
-0.04571405425667763,
0.06460070610046387,
-0.11572471261024475,
0.027989253401756287,
0.010186579078435898,
-0.06127898395061493,
0.026229891926050186,
0.0684911459684372,
-0.015944605693221092,
-0.07579293102025986,
0.032577019184827805,
-0.10819066315889359,
-0.010280699469149113,
-0.06349655985832214,
-0.1365082561969757,
-0.012500187382102013,
0.008294186554849148,
-0.03056950494647026,
-0.09972984343767166,
-0.13582250475883484,
-0.07728952169418335,
0.03086281195282936,
-0.0490681417286396,
-0.008623026311397552,
-0.12508824467658997,
-0.004620908293873072,
0.005178447347134352,
-0.00783916749060154,
-0.060360897332429886,
-0.009550347924232483,
0.0159305427223444,
-0.0468013659119606,
0.05868687480688095,
0.056933529675006866,
0.04449835792183876,
-0.1310131549835205,
0.02034583128988743,
-0.1365133672952652,
0.16122546792030334,
-0.036657996475696564,
0.07043658196926117,
-0.14790882170200348,
0.012960061430931091,
0.007695590145885944,
0.06918495148420334,
0.011192427016794682,
0.15167082846164703,
-0.20805248618125916,
-0.08347177505493164,
0.11627117544412613,
-0.04486473649740219,
-0.09622177481651306,
0.11404623091220856,
-0.026059675961732864,
0.12766335904598236,
0.11539497971534729,
0.1336062103509903,
0.11197026073932648,
-0.03657860681414604,
-0.007888504303991795,
0.03372080996632576,
-0.05919763073325157,
0.12701335549354553,
0.041799746453762054,
-0.059790387749671936,
0.08884764462709427,
-0.003595096291974187,
-0.05133684724569321,
0.005916808731853962,
-0.0055532511323690414,
-0.05893517658114433,
0.02278793789446354,
-0.03731909021735191,
0.06199084594845772,
-0.03259561210870743,
0.0021265316754579544,
0.020104026421904564,
-0.1041102409362793,
0.10434302687644958,
0.07315944880247116,
-0.06927847117185593,
0.00988661590963602,
-0.09068334847688675,
0.01689499244093895,
-0.011279086582362652,
0.01834169775247574,
-0.20836280286312103,
-0.09860008209943771,
0.018224352970719337,
0.0111647704616189,
0.11479011178016663,
0.05942787975072861,
0.060784995555877686,
0.03466511517763138,
-0.013777933083474636,
-0.024150371551513672,
0.04948149621486664,
-0.011275696568191051,
-0.10040592402219772,
-0.11147353798151016,
-0.004711075220257044,
-0.026099199429154396,
0.07885076105594635,
-0.1240713819861412,
0.01615261659026146,
0.011189769022166729,
0.05222694203257561,
0.04548557475209236,
-0.021282002329826355,
-0.0011949785985052586,
-0.02593817189335823,
-0.010511210188269615,
-0.02066218852996826,
0.05370870977640152,
0.013551200740039349,
-0.15068531036376953,
0.098639115691185,
-0.19348114728927612,
-0.11986451596021652,
0.07323870062828064,
-0.0033588395453989506,
-0.053468845784664154,
-0.06752936542034149,
-0.02243049629032612,
0.0008612474193796515,
-0.02881024219095707,
-0.06378059834241867,
0.20773205161094666,
0.08863751590251923,
0.11353452503681183,
-0.03824206441640854,
-0.020834429189562798,
-0.052523668855428696,
-0.039061110466718674,
-0.041148923337459564,
0.11360908299684525,
-0.03425431251525879,
-0.13731153309345245,
0.04242241755127907,
0.06835189461708069,
-0.06057443842291832,
0.1098637804389,
-0.007419364061206579,
-0.07740532606840134,
-0.056310880929231644,
0.02289716899394989,
0.03618020936846733,
-0.00558131281286478,
-0.07957109808921814,
0.0051135472021996975,
0.06210846081376076,
0.012976431287825108,
0.02331971749663353,
-0.05918021872639656,
0.05175791680812836,
0.061745695769786835,
-0.00027820433024317026,
0.09217008203268051,
0.028131481260061264,
-0.0033320763614028692,
0.06631103903055191,
0.016735339537262917,
0.00839456170797348,
-0.0588042326271534,
-0.04493953660130501,
-0.11192728579044342,
0.1729234904050827,
-0.11327758431434631,
-0.20837092399597168,
-0.1559467613697052,
0.0019931872375309467,
-0.05195839703083038,
0.028001630678772926,
0.07700037956237793,
-0.06463636457920074,
-0.07041230797767639,
-0.07064292579889297,
0.08062723278999329,
0.07529620081186295,
-0.04039587825536728,
-0.016884764656424522,
0.036447349935770035,
0.021667618304491043,
-0.12146321684122086,
-0.011897766962647438,
-0.0073241801001131535,
-0.07163125276565552,
-0.00533284991979599,
-0.017712438479065895,
0.06332048028707504,
0.11836757510900497,
0.060199566185474396,
-0.008267408236861229,
-0.01228878740221262,
0.22585532069206238,
-0.08882612735033035,
0.053965236991643906,
0.17980918288230896,
0.009988564997911453,
0.06253664940595627,
0.09444291889667511,
0.025033291429281235,
-0.053538087755441666,
0.04933900758624077,
0.06609516590833664,
-0.012717053294181824,
-0.13374468684196472,
-0.11394944787025452,
-0.06211930140852928,
0.008772527799010277,
0.1341361701488495,
0.034715354442596436,
0.016533374786376953,
0.048434704542160034,
-0.023899583145976067,
0.008791020140051842,
0.08553154021501541,
0.1104595810174942,
0.11878105998039246,
-0.02180572599172592,
0.10955574363470078,
-0.0392196923494339,
-0.07336942106485367,
0.059637367725372314,
-0.010747767053544521,
0.14843644201755524,
0.014997903257608414,
0.17220725119113922,
0.07257068157196045,
-0.03787948563694954,
-0.01636369712650776,
0.07881515473127365,
-0.040922485291957855,
0.004516139626502991,
-0.03391052037477493,
-0.09248165041208267,
-0.010200079530477524,
0.09426295012235641,
0.09251824021339417,
-0.03288635239005089,
-0.04409587383270264,
0.05683652684092522,
0.12639299035072327,
0.13162517547607422,
0.0863746851682663,
-0.24507839977741241,
-0.05254229158163071,
0.029582634568214417,
-0.07987554371356964,
-0.059696849435567856,
-0.005021876189857721,
0.038651250302791595,
-0.1062968373298645,
0.03389729931950569,
-0.012974300421774387,
0.100425586104393,
-0.08998123556375504,
0.038777437061071396,
-0.06201787665486336,
0.053062792867422104,
-0.0013010059483349323,
0.07941921800374985,
-0.23100095987319946,
0.10424447059631348,
0.038923539221286774,
0.04982062056660652,
-0.05577196553349495,
0.02037559077143669,
0.07558072358369827,
0.018198976293206215,
0.16960269212722778,
-0.032160479575395584,
0.00835003238171339,
-0.016314227133989334,
-0.07049760222434998,
-0.004198813810944557,
0.0439835786819458,
-0.12474752962589264,
0.0906430184841156,
-0.05121482163667679,
-0.03107357583940029,
-0.020288139581680298,
0.03056960739195347,
-0.03936387971043587,
-0.1649959832429886,
0.00718201557174325,
0.016728807240724564,
0.004004906862974167,
-0.017278168350458145,
-0.002807940822094679,
0.02284865453839302,
0.19824470579624176,
-0.09863795340061188,
-0.06152454391121864,
-0.12626385688781738,
0.006868230178952217,
0.10305946320295334,
-0.09728941321372986,
0.0016988819697871804,
0.0067728194408118725,
0.15662652254104614,
-0.05098941549658775,
-0.06867457926273346,
0.07084045559167862,
-0.05700360983610153,
-0.04875767230987549,
-0.04457259550690651,
0.09433525800704956,
0.059361737221479416,
0.06412710249423981,
0.04058494046330452,
0.06785962730646133,
-0.043619461357593536,
-0.08896210789680481,
-0.07363823801279068,
0.10284620523452759,
-0.00748594943434,
0.06649179756641388,
-0.12024558335542679,
-0.025812583044171333,
-0.10795668512582779,
0.05335456132888794,
0.19276244938373566,
0.20358410477638245,
-0.06644505262374878,
0.06162688508629799,
0.14249970018863678,
-0.08572462946176529,
-0.23348502814769745,
-0.06984947621822357,
0.03654225170612335,
0.052285753190517426,
0.08351205289363861,
-0.13462449610233307,
0.06964985281229019,
0.05530628561973572,
-0.009404370561242104,
-0.0703151747584343,
-0.24828164279460907,
-0.14230594038963318,
0.13822297751903534,
0.014103319495916367,
-0.042517803609371185,
-0.0870833620429039,
-0.059687916189432144,
-0.07691918313503265,
-0.011099872179329395,
0.06264819204807281,
-0.04031762480735779,
0.10068370401859283,
0.0451015830039978,
0.0385911799967289,
0.06611751019954681,
0.00894507858902216,
0.13422822952270508,
0.06924902647733688,
0.035742081701755524,
-0.03412491828203201,
-0.011928951367735863,
0.059329140931367874,
-0.08369627594947815,
0.15993009507656097,
-0.07651922851800919,
0.03731854259967804,
-0.11959176510572433,
-0.03556034341454506,
-0.03860867768526077,
0.022768869996070862,
-0.039125386625528336,
-0.057439472526311874,
-0.021674899384379387,
0.04626687988638878,
0.0905364602804184,
-0.0023885141126811504,
0.002449515974149108,
-0.09266949445009232,
0.01849813014268875,
0.1504906266927719,
0.1399509459733963,
0.03069789707660675,
-0.17264151573181152,
0.027653886005282402,
0.013803080655634403,
0.06228810176253319,
-0.13395678997039795,
0.07097112387418747,
0.07760631293058395,
-0.009734381921589375,
0.15097717940807343,
0.016140803694725037,
-0.0803101658821106,
-0.01379389502108097,
0.05289313197135925,
-0.06534041464328766,
-0.174154132604599,
-0.05329456925392151,
-0.00780889019370079,
-0.12311127781867981,
-0.054164715111255646,
0.14891605079174042,
-0.016534579917788506,
0.0069067454896867275,
0.04503486305475235,
0.03925640881061554,
-0.030504446476697922,
0.11785413324832916,
-0.02210710197687149,
0.059022579342126846,
-0.05529472604393959,
0.07394768297672272,
0.08832971751689911,
-0.09302345663309097,
0.0233035609126091,
0.11542510986328125,
-0.06666514277458191,
-0.09771336615085602,
-0.0714077278971672,
0.12568554282188416,
-0.08552692830562592,
0.03218565881252289,
-0.061699312180280685,
-0.06872178614139557,
0.019639229401946068,
0.02331184595823288,
0.05282193049788475,
0.05525378882884979,
-0.09754621982574463,
-0.019964121282100677,
-0.08935240656137466,
0.07937531173229218,
0.08907593041658401,
0.01655934751033783,
-0.027928978204727173,
0.07802154868841171,
-0.02947932854294777,
0.006841149181127548,
-0.028684331104159355,
-0.04316039755940437,
-0.06391895562410355,
-0.004087161272764206,
-0.041476618498563766,
-0.019082609564065933,
-0.10058379173278809,
-0.011338088661432266,
0.02848879247903824,
0.04361293092370033,
-0.008429431356489658,
-0.014161401428282261,
-0.05257195979356766,
-0.07321836054325104,
-0.04577163979411125,
0.10621334612369537,
-0.13238714635372162,
-0.0044069248251616955,
0.03603902831673622,
-0.09162435680627823,
0.08428218215703964,
-0.014736729674041271,
-0.026243198662996292,
0.028979778289794922,
-0.01817643642425537,
-0.04711281880736351,
0.026172734797000885,
0.0388607494533062,
0.0656699612736702,
-0.09551696479320526,
0.014363695867359638,
-0.050931598991155624,
0.021366098895668983,
0.0061632138676941395,
0.03877595067024231,
-0.09753090143203735,
0.019343670457601547,
-0.02595524676144123,
-0.014150922186672688,
-0.10545745491981506,
0.027558278292417526,
0.020095936954021454,
0.04167884960770607,
0.17001284658908844,
-0.05626079812645912,
0.06745671480894089,
-0.14090651273727417,
0.013209305703639984,
0.007280318532139063,
-0.050676535815000534,
0.09193269908428192,
-0.12409482151269913,
0.05712103843688965,
-0.04632262513041496,
0.061825916171073914,
-0.0024336560163646936,
0.055553942918777466,
0.0656421035528183,
0.050678402185440063,
0.009983797557651997,
0.016007965430617332,
0.05969718098640442,
0.057159971445798874,
-0.007187733426690102,
-0.05538580194115639,
0.04069733992218971,
0.012812284752726555,
-0.0037030589301139116,
0.04436836019158363,
0.08448923379182816,
0.03095731884241104,
0.09283656626939774,
0.07561028003692627,
0.0034232167527079582,
-0.09863634407520294,
0.0344419851899147,
-0.03436458855867386,
0.06885652989149094,
-0.029029689729213715,
0.06281258910894394,
0.16958938539028168,
-0.1551017314195633,
0.12012600153684616,
0.01705862022936344,
-0.06223062798380852,
-0.07549277693033218,
-0.1511317640542984,
-0.07278639823198318,
-0.052190542221069336,
-0.017206363379955292,
-0.12236565351486206,
-0.015160486102104187,
-0.018891239538788795,
0.006362909451127052,
-0.011532027274370193,
0.13711322844028473,
-0.11476434767246246,
-0.10128747671842575,
0.09500817954540253,
-0.02902846410870552,
0.0417989045381546,
0.013938957825303078,
0.030764130875468254,
0.0188588947057724,
0.06943170726299286,
0.04716206341981888,
0.04185209795832634,
0.03480597585439682,
0.030734535306692123,
-0.08664809912443161,
-0.08678097277879715,
-0.006950694601982832,
-0.010092131793498993,
-0.04464205726981163,
0.07338853180408478,
0.03492778539657593,
-0.08345409482717514,
-0.015144631266593933,
0.2345687299966812,
-0.09481919556856155,
-0.09723734110593796,
-0.17531827092170715,
0.2070256471633911,
0.04562058299779892,
0.03430529311299324,
-0.03122197464108467,
-0.09112242609262466,
-0.019777411594986916,
0.15548516809940338,
0.16933080554008484,
-0.09260101616382599,
0.012015570886433125,
0.026294616982340813,
0.017400603741407394,
-0.0011049507884308696,
0.02114156261086464,
0.052811235189437866,
0.2025461047887802,
-0.045204516500234604,
0.08703287690877914,
-0.015310895629227161,
-0.0592062771320343,
-0.06285561621189117,
0.10496681183576584,
0.04087289422750473,
0.031168127432465553,
-0.015053854323923588,
0.11832980811595917,
-0.035928674042224884,
-0.0747852474451065,
-0.05189366266131401,
-0.08043283224105835,
-0.11957252025604248,
-0.033761166036129,
0.020629065111279488,
0.034525349736213684,
0.1094464659690857,
0.02837396413087845,
-0.02878645993769169,
0.10600654035806656,
-0.015708820894360542,
-0.05819292739033699,
-0.024823348969221115,
0.023692408576607704,
-0.024410557001829147,
0.15826129913330078,
-0.0023233031388372183,
-0.036294806748628616,
0.12487610429525375,
0.008378133177757263,
-0.060945700854063034,
0.08381646126508713,
0.03911086171865463,
-0.07623662799596786,
0.13806310296058655,
0.08187410235404968,
-0.009910349734127522,
0.07978318631649017,
0.07552622258663177,
-0.20663611590862274,
0.061306845396757126,
-0.051315102726221085,
-0.03610329329967499,
-0.06269422918558121,
0.0573250837624073,
-0.07121574133634567,
0.11879786103963852,
0.1780928373336792,
-0.01939769834280014,
-0.010695922188460827,
-0.007660125382244587,
0.0035481546074151993,
0.026050465181469917,
0.06597400456666946,
-0.046494320034980774,
-0.08986823260784149,
-0.002844674978405237,
-0.009627539664506912,
0.0029634900856763124,
-0.2915664315223694,
-0.10878108441829681,
0.02052699588239193,
-0.014006366021931171,
-0.023155320435762405,
0.12745127081871033,
0.083023801445961,
-0.005904953461140394,
-0.033554546535015106,
-0.20050711929798126,
0.036437489092350006,
0.10549002885818481,
-0.12343566864728928,
-0.08773646503686905
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-contrastive-roberta-base
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-contrastive-roberta-base')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-contrastive-roberta-base')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-contrastive-roberta-base')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-contrastive-roberta-base)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.ContrastiveLoss.ContrastiveLoss` with parameters:
```
{'distance_metric': 'SiameseDistanceMetric.COSINE_DISTANCE', 'margin': 0.5, 'size_average': True}
```
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
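A rough sketch of how the contrastive objective and the parameters above would be wired together with sentence-transformers is shown below. The labelled pairs are invented placeholders (label 1 for similar pairs, 0 for dissimilar), and loading the published checkpoint merely stands in for the original roberta-base starting point.
```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample
from sentence_transformers.losses import ContrastiveLoss, SiameseDistanceMetric

# Placeholder pairs: label 1 = similar, 0 = dissimilar
train_examples = [
    InputExample(texts=["a sentence", "a very similar sentence"], label=1),
    InputExample(texts=["a sentence", "something unrelated"], label=0),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=16)

model = SentenceTransformer('aditeyabaral/sentencetransformer-contrastive-roberta-base')
train_loss = ContrastiveLoss(
    model=model,
    distance_metric=SiameseDistanceMetric.COSINE_DISTANCE,
    margin=0.5,
    size_average=True,
)

# Mirrors the fit() parameters listed above
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=10,
    scheduler="WarmupLinear",
    warmup_steps=100,
    optimizer_params={"lr": 2e-5},
    weight_decay=0.01,
)
```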
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-contrastive-roberta-base
|
[
"sentence-transformers",
"pytorch",
"roberta",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-contrastive-roberta-base
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.ContrastiveLoss.ContrastiveLoss' with parameters:
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-contrastive-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.ContrastiveLoss.ContrastiveLoss' with parameters:\n \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-contrastive-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.ContrastiveLoss.ContrastiveLoss' with parameters:\n \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
43,
63,
38,
64,
29,
76,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-contrastive-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.ContrastiveLoss.ContrastiveLoss' with parameters:\n \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.029313521459698677,
0.15913435816764832,
-0.006337138824164867,
0.04489193856716156,
0.11829474568367004,
0.02001447044312954,
0.12981878221035004,
0.08843887597322464,
-0.015264231711626053,
0.08997602015733719,
0.02497459389269352,
0.11372677236795425,
-0.00014875047781970352,
0.026351166889071465,
0.027499793097376823,
-0.27577656507492065,
0.01088726706802845,
-0.03982880711555481,
0.043970003724098206,
0.08111259341239929,
0.10759256780147552,
-0.0787271037697792,
0.06603983044624329,
0.021439848467707634,
-0.04464036971330643,
0.020625177770853043,
-0.022522641345858574,
-0.03829121217131615,
0.08812988549470901,
0.055812396109104156,
0.046897973865270615,
-0.005305991042405367,
0.030381327494978905,
-0.20186053216457367,
0.012134571559727192,
0.06352964788675308,
-0.017585041001439095,
0.06814710795879364,
0.04055077210068703,
-0.05144234746694565,
0.19566307961940765,
-0.05412590503692627,
0.06095132604241371,
0.06265922635793686,
-0.11414620280265808,
-0.08163173496723175,
-0.05193302407860756,
-0.009052880108356476,
0.1381690800189972,
0.10730201750993729,
-0.06564740091562271,
0.10523807257413864,
-0.06191156432032585,
0.07975778728723526,
0.08787502348423004,
-0.27901366353034973,
-0.02743474394083023,
0.015724223107099533,
0.05107003077864647,
0.021455105394124985,
-0.10888369381427765,
-0.0012307076249271631,
-0.016674183309078217,
0.03586304560303688,
0.08107532560825348,
-0.05429285764694214,
0.03356791287660599,
-0.002909202827140689,
-0.10954231023788452,
0.0013202197151258588,
0.18007859587669373,
0.036183323711156845,
-0.024284882470965385,
-0.1661103367805481,
-0.06487639993429184,
0.04899958148598671,
-0.04539189860224724,
-0.024266337975859642,
0.027084816247224808,
0.04669192433357239,
-0.020243007689714432,
-0.08722791075706482,
-0.1147175282239914,
-0.02347324788570404,
-0.073795385658741,
0.00780818797647953,
-0.01465090923011303,
-0.0431651696562767,
-0.004096833057701588,
0.07503147423267365,
-0.09931270033121109,
-0.11323659121990204,
-0.02535841055214405,
-0.017515230923891068,
-0.12119411677122116,
-0.0259284395724535,
-0.049212027341127396,
-0.10743535310029984,
0.027411090210080147,
0.16145172715187073,
0.09198669344186783,
0.006928322371095419,
-0.00862883310765028,
0.04836762696504593,
0.023732434958219528,
0.17958085238933563,
-0.04698063060641289,
-0.10071051865816116,
-0.048966482281684875,
0.021276600658893585,
0.002182310214266181,
-0.01307167299091816,
-0.04181281849741936,
-0.007036797236651182,
0.014722524210810661,
0.05914594978094101,
0.05781722068786621,
0.05424904450774193,
-0.0538540817797184,
-0.029130559414625168,
0.065846748650074,
-0.12315148115158081,
0.024417823180556297,
0.015618463978171349,
-0.060397919267416,
0.013513632118701935,
0.08898859471082687,
-0.015975236892700195,
-0.06766580045223236,
0.03184184804558754,
-0.1105542778968811,
-0.019086942076683044,
-0.06366026401519775,
-0.14128641784191132,
-0.015307254157960415,
-0.009283524006605148,
-0.031832851469516754,
-0.09895235300064087,
-0.13952982425689697,
-0.0796244665980339,
0.026782387867569923,
-0.04236418753862381,
0.0032725969795137644,
-0.12428514659404755,
-0.0020436060149222612,
0.007860354147851467,
-0.011454659514129162,
-0.05054448917508125,
-0.011166302487254143,
0.019516168162226677,
-0.02701660804450512,
0.058149613440036774,
0.04192257672548294,
0.046860501170158386,
-0.12370765954256058,
0.026741739362478256,
-0.09716728329658508,
0.1493862420320511,
-0.029148267582058907,
0.0814908891916275,
-0.14981253445148468,
0.006565338000655174,
0.010557986795902252,
0.06620825827121735,
0.01584886573255062,
0.13867194950580597,
-0.21083290874958038,
-0.06746320426464081,
0.1363215446472168,
-0.05152454599738121,
-0.09575754404067993,
0.09854589402675629,
-0.03136998042464256,
0.15033966302871704,
0.12257618457078934,
0.11136689782142639,
0.1383722573518753,
-0.04170137271285057,
-0.01527799665927887,
0.021839788183569908,
-0.05390740558505058,
0.09641361981630325,
0.044921062886714935,
-0.07625415176153183,
0.1009458675980568,
-0.003330083331093192,
-0.0502140074968338,
0.00724664144217968,
0.0027469920460134745,
-0.062051352113485336,
0.025979386642575264,
-0.046166110783815384,
0.05928768962621689,
-0.03109866753220558,
-0.0014881643000990152,
0.014256151393055916,
-0.106551393866539,
0.09653781354427338,
0.07611354440450668,
-0.0721425861120224,
0.0063108582980930805,
-0.08948205411434174,
0.00798412598669529,
-0.012588288635015488,
0.01349981501698494,
-0.20714865624904633,
-0.09738408029079437,
0.004037171136587858,
0.018453894183039665,
0.1096654087305069,
0.08086325228214264,
0.05582116171717644,
0.027352232486009598,
0.000783552648499608,
-0.018524885177612305,
0.041253115981817245,
-0.018977632746100426,
-0.09451892971992493,
-0.08218414336442947,
0.0022896642331033945,
-0.02937055379152298,
0.05007025972008705,
-0.1349039077758789,
0.0203031525015831,
0.016302473843097687,
0.022993700578808784,
0.04778924211859703,
-0.029757220298051834,
0.001098878332413733,
-0.029350215569138527,
-0.016066286712884903,
-0.02068476937711239,
0.0451602004468441,
0.009611266665160656,
-0.14161613583564758,
0.08139252662658691,
-0.21486090123653412,
-0.125160813331604,
0.06581494957208633,
0.025008082389831543,
-0.06621752679347992,
-0.037362322211265564,
-0.02395177073776722,
-0.003781900741159916,
-0.02823411114513874,
-0.06424585729837418,
0.18374058604240417,
0.09304940700531006,
0.11484801769256592,
-0.028807949274778366,
-0.026781726628541946,
-0.051026780158281326,
-0.04811419919133186,
-0.032411132007837296,
0.11059410125017166,
-0.035720840096473694,
-0.11410406976938248,
0.045881420373916626,
0.08240407705307007,
-0.07764625549316406,
0.10995307564735413,
0.008250348269939423,
-0.07783766090869904,
-0.05842449888586998,
0.027315326035022736,
0.03980855271220207,
-0.010744789615273476,
-0.08551473915576935,
0.0015751213068142533,
0.05879869684576988,
0.013860804960131645,
0.015383047983050346,
-0.06696904450654984,
0.04557434096932411,
0.058734092861413956,
0.004135582130402327,
0.07802866399288177,
0.03155551105737686,
-0.010498834773898125,
0.0613584965467453,
0.021295025944709778,
0.011292342096567154,
-0.05328639969229698,
-0.04761777073144913,
-0.11122844368219376,
0.17625008523464203,
-0.11883247643709183,
-0.20311519503593445,
-0.17214202880859375,
-0.021097427234053612,
-0.04276623949408531,
0.030488677322864532,
0.07915287464857101,
-0.072184719145298,
-0.08120518177747726,
-0.07352075725793839,
0.08056596666574478,
0.0704970583319664,
-0.0506780706346035,
-0.006682281848043203,
0.047142017632722855,
0.013860518112778664,
-0.12048972398042679,
-0.010326149873435497,
-0.009332496672868729,
-0.08374606817960739,
-0.006130008492618799,
-0.00892734806984663,
0.06951991468667984,
0.12496230751276016,
0.048793062567710876,
-0.01669110357761383,
0.0028520619962364435,
0.21316970884799957,
-0.07761306315660477,
0.05153648182749748,
0.2048845738172531,
-0.0047902329824864864,
0.06507837772369385,
0.09716751426458359,
0.025604022666811943,
-0.06496687978506088,
0.06334204971790314,
0.06815850734710693,
-0.022488927468657494,
-0.15168368816375732,
-0.1018034890294075,
-0.0669536218047142,
0.008702150546014309,
0.14019837975502014,
0.028455516323447227,
-0.0033745577093213797,
0.06575525552034378,
-0.010873774997889996,
0.010775293223559856,
0.06908150762319565,
0.11621914058923721,
0.12943245470523834,
-0.028852662071585655,
0.10903598368167877,
-0.05353565514087677,
-0.06676620990037918,
0.06390790641307831,
-0.006064172368496656,
0.14013870060443878,
0.015253235585987568,
0.17361250519752502,
0.0784568190574646,
-0.022812968119978905,
-0.02906159684062004,
0.08519585430622101,
-0.04241299256682396,
0.008107074536383152,
-0.02329866960644722,
-0.09213029593229294,
-0.0346485935151577,
0.08862505108118057,
0.06912898272275925,
-0.028225120157003403,
-0.03628828376531601,
0.06237112730741501,
0.10817838460206985,
0.12226299196481705,
0.09264013171195984,
-0.25220316648483276,
-0.06114143505692482,
0.033503707498311996,
-0.07584314793348312,
-0.07199963927268982,
-0.006790852639824152,
0.05748117342591286,
-0.11471112072467804,
0.023123297840356827,
-0.01197564136236906,
0.10067760199308395,
-0.0932898223400116,
0.033537089824676514,
-0.05091185122728348,
0.04298262670636177,
-0.0039380318485200405,
0.08364003896713257,
-0.20817871391773224,
0.09565205872058868,
0.036840446293354034,
0.05062106251716614,
-0.050164900720119476,
0.03010023944079876,
0.057397834956645966,
-0.00758526474237442,
0.16917364299297333,
-0.033768076449632645,
0.007734782062470913,
-0.009663979522883892,
-0.06786864995956421,
-0.003845816943794489,
0.04919976741075516,
-0.13108249008655548,
0.09902139753103256,
-0.050957102328538895,
-0.02447601966559887,
-0.019098224118351936,
-0.0004791271057911217,
-0.026221858337521553,
-0.16718792915344238,
0.0019053863361477852,
0.022443391382694244,
0.02510971948504448,
-0.011668955907225609,
0.006087332498282194,
0.03588346019387245,
0.21662281453609467,
-0.12915164232254028,
-0.06806173920631409,
-0.12004910409450531,
0.0011575807584449649,
0.10861379653215408,
-0.1033671572804451,
0.012217224575579166,
-0.0032392351422458887,
0.14440929889678955,
-0.043803565204143524,
-0.05942750722169876,
0.05780767649412155,
-0.05720091611146927,
-0.0548359751701355,
-0.04017309471964836,
0.09606830030679703,
0.05755724757909775,
0.050119925290346146,
0.0383884459733963,
0.06467080861330032,
-0.06099894642829895,
-0.0995563492178917,
-0.08343580365180969,
0.08527770638465881,
0.009106962941586971,
0.064185731112957,
-0.11578358709812164,
-0.07298652827739716,
-0.10351431369781494,
0.044344138354063034,
0.20204295217990875,
0.19594813883304596,
-0.07208441942930222,
0.05886077880859375,
0.13411210477352142,
-0.08460244536399841,
-0.2227950245141983,
-0.0702485591173172,
0.03740929812192917,
0.0483158715069294,
0.0927107110619545,
-0.14023426175117493,
0.07245589792728424,
0.0599796399474144,
-0.010518666356801987,
-0.0488441102206707,
-0.25411534309387207,
-0.1385446935892105,
0.13850685954093933,
0.024831702932715416,
-0.045498739928007126,
-0.09390155971050262,
-0.05910422280430794,
-0.07640925794839859,
-0.044122323393821716,
0.08018568158149719,
-0.02629084140062332,
0.09598806500434875,
0.03292061388492584,
0.07228308916091919,
0.0653204545378685,
0.00003346158337080851,
0.1246575117111206,
0.07647690176963806,
0.03456622362136841,
-0.04698830470442772,
-0.019705625250935555,
0.08014096319675446,
-0.08861613273620605,
0.1670844405889511,
-0.07016321271657944,
0.03804195299744606,
-0.1337466984987259,
-0.030771153047680855,
-0.04156774654984474,
0.015563391149044037,
-0.04365900903940201,
-0.057111956179142,
-0.015645790845155716,
0.03585611656308174,
0.08246570080518723,
-0.0027105973567813635,
0.016558939591050148,
-0.08799280226230621,
0.03251134231686592,
0.14834430813789368,
0.12446343898773193,
0.08584675192832947,
-0.17705665528774261,
0.020780054852366447,
0.013848494738340378,
0.053785890340805054,
-0.1566585898399353,
0.06787480413913727,
0.07384215295314789,
-0.0029832578729838133,
0.1444421261548996,
0.016527753323316574,
-0.0807860940694809,
0.009531459771096706,
0.06784863024950027,
-0.07475677132606506,
-0.14343175292015076,
-0.030963363125920296,
-0.01722564361989498,
-0.1480623185634613,
-0.05007407069206238,
0.14824050664901733,
-0.010703686624765396,
0.008505277335643768,
0.04091746360063553,
0.04325387626886368,
-0.037716127932071686,
0.1259821653366089,
-0.00544056948274374,
0.05517003685235977,
-0.05368627980351448,
0.06819303333759308,
0.08079623430967331,
-0.060989949852228165,
0.023752553388476372,
0.11532878130674362,
-0.06824688613414764,
-0.09832897037267685,
-0.0701771229505539,
0.0963057354092598,
-0.10586397349834442,
0.030425280332565308,
-0.05079539492726326,
-0.04907352849841118,
0.014502040110528469,
0.021880745887756348,
0.045645687729120255,
0.0667910948395729,
-0.0957716554403305,
-0.030415691435337067,
-0.08744250982999802,
0.07972774654626846,
0.07961370050907135,
0.006306653842329979,
-0.030027395114302635,
0.08305948227643967,
-0.026618249714374542,
0.014372453093528748,
-0.01793232187628746,
-0.04365656152367592,
-0.06841100752353668,
0.002252514474093914,
-0.043488942086696625,
-0.016811570152640343,
-0.10648873448371887,
-0.006998901721090078,
0.02390301786363125,
0.05129491910338402,
-0.013924933969974518,
-0.017774468287825584,
-0.053451117128133774,
-0.06922945380210876,
-0.04654449224472046,
0.10078109055757523,
-0.14994360506534576,
0.008955538272857666,
0.04004824534058571,
-0.08428993076086044,
0.08219651877880096,
-0.019671235233545303,
-0.012710833922028542,
0.033761780709028244,
-0.022231727838516235,
-0.029714461416006088,
0.023837676271796227,
0.02982795424759388,
0.0637291669845581,
-0.07180143892765045,
0.012874736450612545,
-0.050266627222299576,
0.02476571500301361,
0.0024282264057546854,
0.048984721302986145,
-0.11740335822105408,
0.03496609628200531,
-0.02087259292602539,
-0.007166064344346523,
-0.10698901116847992,
0.036791421473026276,
0.007423699833452702,
0.04090559482574463,
0.1685541868209839,
-0.051702406257390976,
0.07603901624679565,
-0.13064339756965637,
0.006685316096991301,
0.012811479158699512,
-0.03687697649002075,
0.07469993084669113,
-0.1109500452876091,
0.04886919632554054,
-0.04563242942094803,
0.04019486531615257,
0.0005820919759571552,
0.0638471394777298,
0.05582951009273529,
0.04820697009563446,
0.0027956401463598013,
0.0021004355512559414,
0.0629081055521965,
0.046667736023664474,
-0.012646789662539959,
-0.054321784526109695,
0.033784881234169006,
0.004140475764870644,
-0.04643107205629349,
0.050465233623981476,
0.07308682054281235,
0.026762189343571663,
0.08278273046016693,
0.07074619084596634,
-0.001859293901361525,
-0.11880207806825638,
0.02637803740799427,
-0.04304015263915062,
0.06010531261563301,
-0.03221140056848526,
0.0471588633954525,
0.16422927379608154,
-0.14846491813659668,
0.10809632390737534,
0.029692959040403366,
-0.057209037244319916,
-0.08249805122613907,
-0.15495018661022186,
-0.06576285511255264,
-0.028887389227747917,
-0.005231593735516071,
-0.1227443739771843,
-0.001565794344060123,
-0.006229524966329336,
-0.0011633681133389473,
-0.01638180762529373,
0.13109135627746582,
-0.10807810723781586,
-0.11291385442018509,
0.076511450111866,
-0.023235177621245384,
0.043444547802209854,
0.02422848343849182,
0.05499090999364853,
0.01993805170059204,
0.06877728551626205,
0.06032686308026314,
0.055851344019174576,
0.030641958117485046,
0.02640800178050995,
-0.09335135668516159,
-0.07975180447101593,
-0.020127227529883385,
0.014063210226595402,
-0.03765511512756348,
0.0870630294084549,
0.043211985379457474,
-0.07975698262453079,
-0.015417586080729961,
0.2377733439207077,
-0.09699487686157227,
-0.08378387987613678,
-0.17509722709655762,
0.21446499228477478,
0.05063321441411972,
0.033083271235227585,
-0.04187928885221481,
-0.09492985159158707,
-0.003625775221735239,
0.1656886786222458,
0.1585782915353775,
-0.07714542001485825,
0.016540298238396645,
0.02713237702846527,
0.019199509173631668,
0.0012213606387376785,
0.03142330422997475,
0.04775814339518547,
0.2255162000656128,
-0.04353945329785347,
0.08790790289640427,
-0.010424518026411533,
-0.054468657821416855,
-0.05189819261431694,
0.0837768092751503,
0.017099397256970406,
0.030540578067302704,
-0.015333871357142925,
0.12509556114673615,
-0.04054926708340645,
-0.0838015228509903,
-0.03152806684374809,
-0.07718487083911896,
-0.11706208437681198,
-0.0216582790017128,
0.026083417236804962,
0.030501214787364006,
0.10645677149295807,
0.024141134694218636,
-0.03568609803915024,
0.12253914028406143,
-0.019079899415373802,
-0.06940224021673203,
-0.03463844582438469,
0.020523477345705032,
-0.006939902435988188,
0.15666863322257996,
-0.006111886817961931,
-0.039360471069812775,
0.11970571428537369,
0.002545785391703248,
-0.06670767813920975,
0.10096748918294907,
0.035757437348365784,
-0.06374680995941162,
0.1303696185350418,
0.07167906314134598,
-0.01604916900396347,
0.08755628019571304,
0.07613413035869598,
-0.17939303815364838,
0.052233729511499405,
-0.04102978855371475,
-0.020370671525597572,
-0.07372332364320755,
0.04859774187207222,
-0.07300129532814026,
0.1272079050540924,
0.18437348306179047,
-0.02772512473165989,
-0.010805287398397923,
-0.007239812053740025,
0.011807559058070183,
0.021503062918782234,
0.06511484086513519,
-0.049471575766801834,
-0.08171966671943665,
-0.006470184773206711,
0.004555437248200178,
0.0016795892734080553,
-0.3169232904911041,
-0.0931277722120285,
0.013409804552793503,
-0.012583252973854542,
-0.02966979891061783,
0.11876314133405685,
0.07422438263893127,
0.0055720922537148,
-0.03223663195967674,
-0.20478519797325134,
0.030653417110443115,
0.10920297354459763,
-0.1309458166360855,
-0.09604526311159134
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-distilbert-base-cased
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-distilbert-base-cased')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-distilbert-base-cased')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-distilbert-base-cased')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-distilbert-base-cased)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
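As a rough illustration, the parameters above correspond to a training setup along the following lines. This is a minimal sketch, not the exact script used for this model: the base checkpoint is assumed from the model name, and the `InputExample` pairs below are placeholders, since the training data is not described in this card.
```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

# Hypothetical labelled sentence pairs; the real training data is not documented here.
train_examples = [
    InputExample(texts=["A sentence", "A similar sentence"], label=0.9),
    InputExample(texts=["A sentence", "An unrelated sentence"], label=0.1),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=16)

# Base checkpoint assumed from the model name.
model = SentenceTransformer("distilbert-base-cased")
train_loss = losses.CosineSimilarityLoss(model)

# fit() arguments mirror the parameters listed above.
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=10,
    warmup_steps=100,
    optimizer_params={"lr": 2e-05},
    weight_decay=0.01,
)
```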
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: DistilBertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-distilbert-base-cased
|
[
"sentence-transformers",
"pytorch",
"distilbert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-distilbert-base-cased
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-distilbert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-distilbert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
44,
63,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-distilbert-base-cased\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.046746522188186646,
0.1405700147151947,
-0.0059985010884702206,
0.047824542969465256,
0.11225137114524841,
0.026609737426042557,
0.14218978583812714,
0.08865829557180405,
0.016082148998975754,
0.10252051055431366,
0.018299173563718796,
0.09666856378316879,
0.003986857831478119,
0.030990252271294594,
0.022644687443971634,
-0.24829137325286865,
0.0230727456510067,
-0.05043899267911911,
0.04400848597288132,
0.0786566212773323,
0.10305573046207428,
-0.08618488907814026,
0.07305455207824707,
0.022053474560379982,
-0.052397165447473526,
-0.0003666631819214672,
-0.024038787931203842,
-0.022307708859443665,
0.08184180408716202,
0.05355115607380867,
0.050980519503355026,
0.007105500437319279,
0.02949114702641964,
-0.20389269292354584,
0.011615223251283169,
0.05785415321588516,
-0.007948556914925575,
0.05743607506155968,
0.02351824752986431,
-0.041536301374435425,
0.17468300461769104,
-0.05781315639615059,
0.06047334149479866,
0.061746567487716675,
-0.11727184057235718,
-0.03825124725699425,
-0.06395325064659119,
-0.024885743856430054,
0.1509765386581421,
0.1067313477396965,
-0.06489346921443939,
0.10508766770362854,
-0.05717545375227928,
0.06863605976104736,
0.0821746438741684,
-0.26716485619544983,
-0.026957932859659195,
0.034408994019031525,
0.052773188799619675,
0.026393214240670204,
-0.09849301725625992,
0.010606965981423855,
-0.008951326832175255,
0.02199331484735012,
0.06327540427446365,
-0.05722634121775627,
0.06075553968548775,
-0.014712224714457989,
-0.10864188522100449,
-0.009813512675464153,
0.16847124695777893,
0.03993423655629158,
-0.033180270344018936,
-0.17393948137760162,
-0.056207090616226196,
0.0307978056371212,
-0.05619029700756073,
-0.034941431134939194,
0.029009001329541206,
0.04249412938952446,
0.006180899683386087,
-0.0776907280087471,
-0.12106934189796448,
-0.0211512241512537,
-0.06831957399845123,
0.0007946689147502184,
-0.02394631691277027,
-0.047809600830078125,
0.011023973114788532,
0.06846258789300919,
-0.09923780709505081,
-0.1126522645354271,
-0.04241186007857323,
-0.01137600652873516,
-0.1252748668193817,
-0.038152825087308884,
-0.050924867391586304,
-0.098819300532341,
0.04184291884303093,
0.15385206043720245,
0.09067626297473907,
0.010004406794905663,
-0.004236659500747919,
0.04963034763932228,
0.02645774930715561,
0.18728488683700562,
-0.033902592957019806,
-0.09070312231779099,
-0.043484386056661606,
0.025847313925623894,
-0.0015541263855993748,
-0.02343006804585457,
-0.04911419376730919,
0.003075603162869811,
0.022264236584305763,
0.06974207609891891,
0.055456314235925674,
0.04831188917160034,
-0.06638117879629135,
-0.0251501202583313,
0.07287104427814484,
-0.1141546368598938,
0.033458732068538666,
0.013122783973813057,
-0.0437198281288147,
0.013304179534316063,
0.08154729008674622,
-0.004154498223215342,
-0.06182814761996269,
0.02378513477742672,
-0.11350568383932114,
-0.012463954277336597,
-0.06338353455066681,
-0.13556639850139618,
-0.003433378180488944,
-0.022571878507733345,
-0.0375722274184227,
-0.10416582971811295,
-0.12439490109682083,
-0.09113100916147232,
0.011352667585015297,
-0.036060404032468796,
0.004037501756101847,
-0.13383182883262634,
-0.0027223534416407347,
0.018115686252713203,
-0.008787637576460838,
-0.0714082419872284,
-0.016246482729911804,
-0.0004303180321585387,
-0.03477793186903,
0.05132241174578667,
0.03545966371893883,
0.040775615721940994,
-0.12282299995422363,
0.035615209490060806,
-0.11500366032123566,
0.1467173993587494,
-0.04023667797446251,
0.09140978753566742,
-0.14602287113666534,
0.01471792533993721,
0.025364182889461517,
0.056332189589738846,
0.011322923004627228,
0.14926867187023163,
-0.21991346776485443,
-0.059248775243759155,
0.1197097972035408,
-0.05172959715127945,
-0.10535842180252075,
0.09997780621051788,
-0.02321101725101471,
0.1309681385755539,
0.11392652243375778,
0.09583867341279984,
0.14567162096500397,
-0.0588749498128891,
-0.025946272537112236,
0.02153877541422844,
-0.033755313605070114,
0.10449827462434769,
0.043214697390794754,
-0.0733000859618187,
0.10306108742952347,
-0.00592664023861289,
-0.059426140040159225,
-0.005489840172231197,
-0.000802780210506171,
-0.0627879723906517,
0.023095929995179176,
-0.038657836616039276,
0.05254479497671127,
-0.03495629504323006,
-0.009636582806706429,
0.011236891150474548,
-0.10144426673650742,
0.08793744444847107,
0.07611475139856339,
-0.06949030607938766,
0.011125952005386353,
-0.10019496083259583,
0.02381325699388981,
-0.003105144016444683,
0.010481422767043114,
-0.1989024430513382,
-0.12665246427059174,
0.01777036488056183,
0.015430937521159649,
0.11489347368478775,
0.07673901319503784,
0.0466485358774662,
0.03385050222277641,
0.010415827855467796,
-0.01638551615178585,
0.03981786221265793,
-0.02013949118554592,
-0.09939104318618774,
-0.08856860548257828,
0.008051687851548195,
-0.03009306639432907,
0.08670838922262192,
-0.1185058206319809,
0.015958309173583984,
0.03676562383770943,
0.030571434646844864,
0.046497974544763565,
-0.037237007170915604,
-0.0003703269176185131,
-0.012244527228176594,
-0.01799132488667965,
-0.023672059178352356,
0.04531398415565491,
0.008528897538781166,
-0.14100977778434753,
0.08416855335235596,
-0.22432498633861542,
-0.1372411549091339,
0.06996971368789673,
0.030743811279535294,
-0.06025049462914467,
-0.05315250903367996,
-0.01614341139793396,
0.0034083875361829996,
-0.0176372230052948,
-0.06647548824548721,
0.1971137672662735,
0.08971846848726273,
0.1020364984869957,
-0.030804404988884926,
-0.024458056315779686,
-0.048248495906591415,
-0.03888242691755295,
-0.03239962086081505,
0.09792905300855637,
-0.022738859057426453,
-0.11153732985258102,
0.0398731492459774,
0.08208650350570679,
-0.06714434176683426,
0.092379130423069,
0.00019414488633628935,
-0.07993284612894058,
-0.05860945209860802,
0.03390162065625191,
0.04924729838967323,
-0.010190530680119991,
-0.09798555821180344,
0.00422762893140316,
0.06147884577512741,
0.011504394933581352,
0.012019195593893528,
-0.06548240035772324,
0.052898045629262924,
0.05584716796875,
0.00015356490621343255,
0.08578235656023026,
0.0284249447286129,
-0.011681449599564075,
0.05005098879337311,
0.0225734431296587,
0.012243715114891529,
-0.06484052538871765,
-0.04967193678021431,
-0.11568331718444824,
0.18275614082813263,
-0.12285810708999634,
-0.22624057531356812,
-0.17096646130084991,
-0.0016092391451820731,
-0.04054218903183937,
0.03797559440135956,
0.07704216986894608,
-0.05784910172224045,
-0.08345234394073486,
-0.0788259208202362,
0.07320113480091095,
0.08714582026004791,
-0.032298509031534195,
-0.0069222282618284225,
0.034489117562770844,
0.009202299639582634,
-0.12273287028074265,
-0.018898537382483482,
-0.003260485827922821,
-0.07407785207033157,
-0.005726787261664867,
-0.0074047208763659,
0.0826028510928154,
0.13268402218818665,
0.05285615846514702,
-0.023525699973106384,
-0.0006580771296285093,
0.21568013727664948,
-0.08986826986074448,
0.04383980110287666,
0.19013279676437378,
-0.0035665060859173536,
0.06114041805267334,
0.09646471589803696,
0.023385077714920044,
-0.07117956131696701,
0.05635746568441391,
0.06251510232686996,
-0.021735595539212227,
-0.14053334295749664,
-0.11089058965444565,
-0.06942792236804962,
0.011751013807952404,
0.13315697014331818,
0.027295313775539398,
-0.002906567882746458,
0.05881476402282715,
-0.015966689214110374,
0.007133134175091982,
0.061685651540756226,
0.10978217422962189,
0.12987945973873138,
-0.022821879014372826,
0.10522881895303726,
-0.0476512685418129,
-0.056670892983675,
0.06482519954442978,
-0.0001829142711358145,
0.13319408893585205,
0.030328018590807915,
0.18720415234565735,
0.08025842159986496,
-0.029104243963956833,
-0.02194400690495968,
0.07867275923490524,
-0.02693885751068592,
0.0023042457178235054,
-0.01683647930622101,
-0.09295539557933807,
-0.031824931502342224,
0.09544750303030014,
0.08104729652404785,
-0.04160015657544136,
-0.044555824249982834,
0.07867997884750366,
0.10864700376987457,
0.12174256891012192,
0.10575716197490692,
-0.26069051027297974,
-0.05877263471484184,
0.02829112485051155,
-0.06926573067903519,
-0.06531763076782227,
-0.01462706457823515,
0.0432717464864254,
-0.11200836300849915,
0.025510618463158607,
-0.0002845238195732236,
0.10065100342035294,
-0.1045636385679245,
0.0201979111880064,
-0.05348803102970123,
0.02882136031985283,
-0.003530048532411456,
0.0707864761352539,
-0.20117942988872528,
0.08970871567726135,
0.037375904619693756,
0.04789392650127411,
-0.04493448883295059,
0.032284926623106,
0.0580751895904541,
0.019754301756620407,
0.1675766557455063,
-0.02957097254693508,
-0.0016544366953894496,
-0.030115246772766113,
-0.08300280570983887,
-0.0036480179987847805,
0.054961610585451126,
-0.13744401931762695,
0.10455825179815292,
-0.04196764528751373,
-0.021860601380467415,
-0.022803355008363724,
-0.0004246341122779995,
-0.037494026124477386,
-0.16071967780590057,
0.01428389921784401,
0.026420870795845985,
0.021408729255199432,
-0.016066867858171463,
-0.000451806205091998,
0.019633684307336807,
0.21680176258087158,
-0.13137850165367126,
-0.060183119028806686,
-0.12507109344005585,
-0.008540372364223003,
0.10201115906238556,
-0.09860727190971375,
0.017889032140374184,
-0.007152565289288759,
0.15894310176372528,
-0.04883313551545143,
-0.04595952108502388,
0.06259580701589584,
-0.059764184057712555,
-0.05926770716905594,
-0.04519648104906082,
0.10572735965251923,
0.05939937382936478,
0.05362372472882271,
0.048249226063489914,
0.06391235440969467,
-0.044509127736091614,
-0.10560940951108932,
-0.08428863435983658,
0.10707157105207443,
0.005409163422882557,
0.06792178750038147,
-0.09573096036911011,
-0.05269067734479904,
-0.10359131544828415,
0.04894394800066948,
0.19471095502376556,
0.2157551497220993,
-0.06669094413518906,
0.054787490516901016,
0.11797711998224258,
-0.08595304936170578,
-0.23285752534866333,
-0.057168055325746536,
0.04108121618628502,
0.054997123777866364,
0.058359503746032715,
-0.12573537230491638,
0.07864277809858322,
0.05888081341981888,
-0.014492555521428585,
-0.03709288686513901,
-0.24822579324245453,
-0.15114520490169525,
0.13521814346313477,
0.012490046210587025,
-0.053404390811920166,
-0.08656775951385498,
-0.061560142785310745,
-0.07816878706216812,
-0.05071673542261124,
0.07661422342061996,
-0.021367445588111877,
0.09666705131530762,
0.03705403208732605,
0.06431825459003448,
0.0641317367553711,
0.0054206084460020065,
0.13623835146427155,
0.08411933481693268,
0.03851063549518585,
-0.043731071054935455,
-0.012585512362420559,
0.08085743337869644,
-0.09301936626434326,
0.16143380105495453,
-0.07877720892429352,
0.0410459078848362,
-0.1303146481513977,
-0.03366616740822792,
-0.024562809616327286,
0.023584535345435143,
-0.04245208948850632,
-0.05440223962068558,
-0.029202401638031006,
0.02907053381204605,
0.07312457263469696,
0.0010973119642585516,
0.0020342604257166386,
-0.09440910816192627,
0.009241207502782345,
0.15548387169837952,
0.1412995606660843,
0.04335911571979523,
-0.16232270002365112,
0.027902260422706604,
0.010290115140378475,
0.05298345163464546,
-0.1432226449251175,
0.0661366730928421,
0.08506710082292557,
-0.0050188396126031876,
0.1415162980556488,
0.014408604241907597,
-0.0786912590265274,
0.014109020121395588,
0.07453514635562897,
-0.0675014853477478,
-0.1594577580690384,
-0.026779767125844955,
-0.008847976103425026,
-0.14466774463653564,
-0.045068446546792984,
0.16177335381507874,
-0.00750013766810298,
0.012641767039895058,
0.042019955813884735,
0.03674757480621338,
-0.032701849937438965,
0.1271599978208542,
-0.021532349288463593,
0.06042201444506645,
-0.055530156940221786,
0.06319673359394073,
0.07606780529022217,
-0.08108314871788025,
0.013679303228855133,
0.11124835908412933,
-0.07870495319366455,
-0.09951688349246979,
-0.07065220177173615,
0.11009114235639572,
-0.1170964315533638,
0.0318077877163887,
-0.04653771221637726,
-0.054743554443120956,
0.014281434006989002,
0.031091004610061646,
0.05370507389307022,
0.07608482986688614,
-0.08459345996379852,
-0.029589997604489326,
-0.07934358716011047,
0.07419300079345703,
0.06890162080526352,
0.02125943824648857,
-0.03402206674218178,
0.07990089803934097,
-0.03500651940703392,
0.0024382425472140312,
-0.019882073625922203,
-0.040513619780540466,
-0.05446219444274902,
-0.006854409817606211,
-0.04520519822835922,
-0.009939068928360939,
-0.10417807847261429,
-0.007218382321298122,
0.037297509610652924,
0.041204020380973816,
-0.014826943166553974,
-0.011997569352388382,
-0.04817235469818115,
-0.07420431822538376,
-0.05815904587507248,
0.1027064099907875,
-0.13786116242408752,
0.004592170938849449,
0.04445186257362366,
-0.08558571338653564,
0.09407804906368256,
-0.022942008450627327,
-0.00852245558053255,
0.034588735550642014,
-0.03017560765147209,
-0.021605994552373886,
0.04211443290114403,
0.03869976848363876,
0.06499706208705902,
-0.07902045547962189,
0.008357228711247444,
-0.04775749146938324,
0.021949000656604767,
0.009984155185520649,
0.0491916798055172,
-0.10135839879512787,
0.01974291168153286,
-0.01634821854531765,
-0.0034539494663476944,
-0.1048293262720108,
0.032088398933410645,
0.0324900783598423,
0.04001564532518387,
0.15988458693027496,
-0.04581573233008385,
0.07663840800523758,
-0.12272576242685318,
0.006312401499599218,
0.015924550592899323,
-0.023082222789525986,
0.07305777072906494,
-0.1235690712928772,
0.0562126561999321,
-0.047609832137823105,
0.04795508086681366,
-0.008318734355270863,
0.07097936421632767,
0.052931636571884155,
0.05470281466841698,
0.015136701986193657,
0.0170492734760046,
0.06147807464003563,
0.06100369989871979,
-0.01701485924422741,
-0.053422972559928894,
0.03620496764779091,
0.013125722296535969,
-0.04741552472114563,
0.06975436210632324,
0.06420715153217316,
0.03245098888874054,
0.09631353616714478,
0.07280655205249786,
-0.0004173803608864546,
-0.12386544048786163,
0.0256953164935112,
-0.06690137088298798,
0.06394988298416138,
-0.03641694411635399,
0.06339620798826218,
0.15446417033672333,
-0.15651780366897583,
0.09761761873960495,
0.01537585910409689,
-0.061218928545713425,
-0.06880024075508118,
-0.1447768658399582,
-0.07385525852441788,
-0.04940672963857651,
-0.0077510373666882515,
-0.12683360278606415,
-0.00868165772408247,
-0.0213363878428936,
0.003557767253369093,
-0.015441331081092358,
0.14183606207370758,
-0.12098605185747147,
-0.09969957172870636,
0.08062490820884705,
-0.0239797905087471,
0.047562748193740845,
0.025866813957691193,
0.031643591821193695,
0.023285651579499245,
0.060800619423389435,
0.06209215521812439,
0.04978861287236214,
0.045597247779369354,
0.03747063875198364,
-0.08874376118183136,
-0.08226451277732849,
-0.014983879402279854,
0.007836369797587395,
-0.04004841297864914,
0.0775115117430687,
0.04297637939453125,
-0.0701940655708313,
-0.008861245587468147,
0.21045702695846558,
-0.08486614376306534,
-0.08488334715366364,
-0.17605070769786835,
0.19742940366268158,
0.037537723779678345,
0.04401135444641113,
-0.041282862424850464,
-0.08939258754253387,
-0.0035725990310311317,
0.14140623807907104,
0.1443973332643509,
-0.08185999095439911,
0.01614440232515335,
0.022062689065933228,
0.01627485454082489,
-0.015106778591871262,
0.02962595783174038,
0.04135973006486893,
0.21780739724636078,
-0.04313724488019943,
0.07331033796072006,
-0.0033418922685086727,
-0.06268928945064545,
-0.046064432710409164,
0.07873552292585373,
0.01273463573306799,
0.035618629306554794,
-0.019695991650223732,
0.12201236933469772,
-0.034951094537973404,
-0.07094742357730865,
-0.013916661962866783,
-0.08430927246809006,
-0.11306031793355942,
-0.017238080501556396,
0.012529966421425343,
0.020584069192409515,
0.10497388988733292,
0.020614085718989372,
-0.047121454030275345,
0.11847619712352753,
-0.025648711249232292,
-0.06844118237495422,
-0.03586420789361,
0.009900898672640324,
-0.01150435209274292,
0.1588304489850998,
-0.006200715899467468,
-0.02657461352646351,
0.12317507714033127,
0.01615634374320507,
-0.06918448954820633,
0.0886620357632637,
0.044104307889938354,
-0.06974378228187561,
0.13614171743392944,
0.0696229562163353,
-0.019245445728302002,
0.08921819925308228,
0.0843503400683403,
-0.180502787232399,
0.05370207503437996,
-0.060338519513607025,
-0.017565324902534485,
-0.07901682704687119,
0.03464261814951897,
-0.0807633250951767,
0.12937265634536743,
0.16745734214782715,
-0.02371104247868061,
-0.01291975099593401,
0.012006825767457485,
0.01352313905954361,
0.018502935767173767,
0.06764434278011322,
-0.04992041364312172,
-0.09794559329748154,
0.008707672357559204,
0.017476266250014305,
0.002597342012450099,
-0.2857886552810669,
-0.10123705118894577,
0.013679323717951775,
-0.0026579697150737047,
-0.017619630321860313,
0.13276517391204834,
0.06447112560272217,
-0.007818270474672318,
-0.03412424400448799,
-0.23810796439647675,
0.039915986359119415,
0.11445984244346619,
-0.11935745924711227,
-0.10347459465265274
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-distilbert-hinglish-big
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-distilbert-hinglish-big')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
    token_embeddings = model_output[0]  # First element of model_output contains all token embeddings
    input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
    return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-distilbert-hinglish-big')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-distilbert-hinglish-big')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
    model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-distilbert-hinglish-big)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: DistilBertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
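For reference, an equivalent module stack can be assembled by hand with sentence-transformers. This is a sketch under the stated module configuration only; the underlying DistilBERT checkpoint name is an assumption, as the card does not state it.
```python
from sentence_transformers import SentenceTransformer, models

# Transformer module matching the printed config (DistilBertModel, max_seq_length=128);
# the checkpoint name here is assumed, not taken from this card.
word_embedding_model = models.Transformer("distilbert-base-cased", max_seq_length=128)

# Mean pooling over token embeddings, as in the Pooling config above.
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),
    pooling_mode_mean_tokens=True,
)

model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
```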
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-distilbert-hinglish-big
|
[
"sentence-transformers",
"pytorch",
"distilbert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-distilbert-hinglish-big
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-distilbert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-distilbert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
44,
63,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-distilbert-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.049218278378248215,
0.1033191829919815,
-0.006312766578048468,
0.04641738533973694,
0.11152619123458862,
0.02377561293542385,
0.13578233122825623,
0.08989156037569046,
0.014280537143349648,
0.10241477191448212,
0.021421734243631363,
0.08400843292474747,
0.012742012739181519,
0.027975741773843765,
0.01901540532708168,
-0.25817593932151794,
0.013967328704893589,
-0.0501788966357708,
0.029681025072932243,
0.07518401741981506,
0.11007670313119888,
-0.09107589721679688,
0.07257257401943207,
0.03119395673274994,
-0.04563469439744949,
-0.005995439365506172,
-0.028028402477502823,
-0.024126097559928894,
0.07633822411298752,
0.061812080442905426,
0.04378342255949974,
0.007243949454277754,
0.020659545436501503,
-0.18327656388282776,
0.013948368839919567,
0.056300580501556396,
-0.005036385729908943,
0.06279802322387695,
0.03326547145843506,
-0.03326142206788063,
0.1568223237991333,
-0.06745384633541107,
0.05357872322201729,
0.06897477805614471,
-0.11064180731773376,
-0.055297188460826874,
-0.06500355899333954,
-0.028282221406698227,
0.14252856373786926,
0.10748818516731262,
-0.06370274722576141,
0.10749278217554092,
-0.0666184276342392,
0.06859400123357773,
0.09240452945232391,
-0.2617395222187042,
-0.032976556569337845,
0.04284001886844635,
0.05822113901376724,
0.03539186343550682,
-0.09911131113767624,
0.00429724995046854,
-0.009904712438583374,
0.02756461128592491,
0.0594472736120224,
-0.04534357041120529,
0.09066284447908401,
-0.014287730678915977,
-0.1081576943397522,
-0.004346140660345554,
0.1676708459854126,
0.04135040193796158,
-0.029089627787470818,
-0.18782831728458405,
-0.05447608605027199,
0.015974557027220726,
-0.06379160284996033,
-0.04340726509690285,
0.030603187158703804,
0.04031262546777725,
0.01082812249660492,
-0.08472608774900436,
-0.12206427752971649,
-0.012364082038402557,
-0.07831789553165436,
-0.0006491359672509134,
-0.027335669845342636,
-0.04671875759959221,
-0.0027983863838016987,
0.053168345242738724,
-0.10029539465904236,
-0.11654879152774811,
-0.05443738400936127,
-0.013745488598942757,
-0.12245047837495804,
-0.04883340001106262,
-0.048544783145189285,
-0.11881217360496521,
0.043251339346170425,
0.13747596740722656,
0.07564640045166016,
0.006701339967548847,
-0.01692250557243824,
0.05254432559013367,
0.02850400283932686,
0.1924230009317398,
-0.04239486902952194,
-0.08131229132413864,
-0.03904028981924057,
0.023191630840301514,
-0.0027071537915617228,
-0.01628657802939415,
-0.04499983415007591,
-0.0019654736388474703,
0.01715581677854061,
0.06655610352754593,
0.0531153529882431,
0.04141689091920853,
-0.06273461133241653,
-0.03044884279370308,
0.07846664637327194,
-0.10981539636850357,
0.037806134670972824,
0.01795056089758873,
-0.04950335621833801,
0.03234758973121643,
0.06297944486141205,
-0.009611349552869797,
-0.062202293425798416,
0.035103313624858856,
-0.10960191488265991,
-0.00761650875210762,
-0.05929594114422798,
-0.1343030482530594,
-0.0009737025247886777,
0.004572698380798101,
-0.03622690588235855,
-0.10123173147439957,
-0.12686456739902496,
-0.08769356459379196,
0.010077822022140026,
-0.04876747354865074,
0.004454585257917643,
-0.1219492256641388,
-0.006251487415283918,
0.017814580351114273,
-0.004814963787794113,
-0.061933089047670364,
-0.015266490168869495,
0.0005339113413356245,
-0.03834285959601402,
0.056117717176675797,
0.040834199637174606,
0.04337696731090546,
-0.12354213744401932,
0.027276404201984406,
-0.13710036873817444,
0.1523459553718567,
-0.03734505549073219,
0.09824558347463608,
-0.15185949206352234,
0.007082583382725716,
0.010171659290790558,
0.052514929324388504,
0.007385019678622484,
0.14916136860847473,
-0.2142437845468521,
-0.0653524175286293,
0.11995897442102432,
-0.04791397973895073,
-0.11218655854463577,
0.1183520257472992,
-0.01670585758984089,
0.1284160166978836,
0.10895786434412003,
0.09403828531503677,
0.14579510688781738,
-0.046974990516901016,
-0.027178779244422913,
0.025885630398988724,
-0.042265087366104126,
0.10779257118701935,
0.049400702118873596,
-0.06844399124383926,
0.10401739925146103,
-0.009554749354720116,
-0.04154770076274872,
-0.0034655265044420958,
-0.0027669307310134172,
-0.06380083411931992,
0.022329634055495262,
-0.032863516360521317,
0.04899745434522629,
-0.03563098609447479,
0.001371426391415298,
0.006230467464774847,
-0.09787068516016006,
0.0628271996974945,
0.0782574713230133,
-0.05810868367552757,
0.01056480873376131,
-0.10075820982456207,
0.0245962031185627,
-0.00406454736366868,
0.01328996941447258,
-0.1892981082201004,
-0.11087993532419205,
0.019331881776452065,
0.01227391604334116,
0.11023732274770737,
0.0729825347661972,
0.056282490491867065,
0.0308658666908741,
0.008460151962935925,
-0.012193717062473297,
0.05139903724193573,
-0.028773950412869453,
-0.10091666132211685,
-0.09113778173923492,
0.008816136978566647,
-0.02876298502087593,
0.0907566174864769,
-0.12320391088724136,
0.011473585851490498,
0.048306047916412354,
0.02674620971083641,
0.053426362574100494,
-0.03028995916247368,
0.0014447334688156843,
-0.019862409681081772,
-0.017864828929305077,
-0.020345579832792282,
0.03864017128944397,
0.004737295676022768,
-0.14325080811977386,
0.08371485024690628,
-0.2066752016544342,
-0.12615403532981873,
0.07201169431209564,
0.03500223532319069,
-0.05630956217646599,
-0.065861776471138,
-0.016054881736636162,
0.0030397980008274317,
-0.0217338465154171,
-0.06824376434087753,
0.19089211523532867,
0.09563042968511581,
0.10489176958799362,
-0.03158426657319069,
-0.020319988951086998,
-0.042400456964969635,
-0.03200749307870865,
-0.033588435500860214,
0.10240773111581802,
-0.033004000782966614,
-0.13098497688770294,
0.042759958654642105,
0.08482879400253296,
-0.06448036432266235,
0.09897209703922272,
-0.0017269867239519954,
-0.07741319388151169,
-0.0560731403529644,
0.04284362867474556,
0.05558423697948456,
-0.018696662038564682,
-0.0955064445734024,
0.016849374398589134,
0.06587286293506622,
0.013704923912882805,
0.006293349899351597,
-0.05374985560774803,
0.05315064638853073,
0.061428166925907135,
0.0017936324002221227,
0.09189596027135849,
0.027186082676053047,
-0.0015333304181694984,
0.05427633225917816,
0.024880414828658104,
0.012955564074218273,
-0.05779094249010086,
-0.048844367265701294,
-0.1086333766579628,
0.18021532893180847,
-0.11713331192731857,
-0.2245159149169922,
-0.16796144843101501,
0.0035999685060232878,
-0.043281059712171555,
0.03167777135968208,
0.07908972352743149,
-0.06526634842157364,
-0.08225493878126144,
-0.07706212252378464,
0.08504939079284668,
0.0837632343173027,
-0.03273577243089676,
-0.0036512380465865135,
0.03331121429800987,
0.0075906505808234215,
-0.12452808767557144,
-0.01775074563920498,
-0.0004983970429748297,
-0.07722271978855133,
-0.0012666252441704273,
-0.003391710575670004,
0.06643056124448776,
0.1187291368842125,
0.053730130195617676,
-0.019791854545474052,
-0.005725768860429525,
0.21777762472629547,
-0.09901095181703568,
0.0419904999434948,
0.19024859368801117,
0.015689637511968613,
0.058000192046165466,
0.09451545029878616,
0.031152943149209023,
-0.06362586468458176,
0.051117103546857834,
0.053436554968357086,
-0.020454978570342064,
-0.14145220816135406,
-0.1164797767996788,
-0.07625602185726166,
0.025024184957146645,
0.1348428726196289,
0.030163023620843887,
0.0005992276128381491,
0.06173273175954819,
-0.024026157334446907,
0.011053073219954967,
0.06445205211639404,
0.10370287299156189,
0.12378662824630737,
-0.014029276557266712,
0.10203922539949417,
-0.04337773844599724,
-0.057587746530771255,
0.07187744230031967,
-0.009143099188804626,
0.12771272659301758,
0.024243852123618126,
0.20846574008464813,
0.06424619257450104,
-0.030878474935889244,
-0.016971096396446228,
0.0850098580121994,
-0.030055943876504898,
-0.012720044702291489,
-0.016227271407842636,
-0.09444937855005264,
-0.02131214365363121,
0.0970434844493866,
0.09197010099887848,
-0.030730439350008965,
-0.042194586247205734,
0.08191472291946411,
0.1133604422211647,
0.1319282203912735,
0.0983828529715538,
-0.2597965896129608,
-0.061522476375103,
0.023308008909225464,
-0.07173129916191101,
-0.06308145821094513,
-0.010275200009346008,
0.03993377834558487,
-0.1122913733124733,
0.030888758599758148,
-0.0068743242882192135,
0.0989602655172348,
-0.08788632601499557,
0.025864021852612495,
-0.06298219412565231,
0.03950691968202591,
-0.007071704603731632,
0.07159718126058578,
-0.21783500909805298,
0.08065967261791229,
0.03944609686732292,
0.044361285865306854,
-0.059161264449357986,
0.03293664753437042,
0.06274416297674179,
0.013758999295532703,
0.16742466390132904,
-0.030454957857728004,
0.01581246592104435,
-0.02522248961031437,
-0.08422739058732986,
-0.007590854074805975,
0.050252143293619156,
-0.13142605125904083,
0.09565194696187973,
-0.041936762630939484,
-0.023082267493009567,
-0.025416793301701546,
0.010213585570454597,
-0.02884201519191265,
-0.16037864983081818,
0.013619959354400635,
0.028874032199382782,
0.022526543587446213,
-0.017129989340901375,
-0.001364834955893457,
0.012554576620459557,
0.21050117909908295,
-0.12199840694665909,
-0.05873747915029526,
-0.12795069813728333,
0.005740506108850241,
0.10033392906188965,
-0.09882775694131851,
0.01302117295563221,
-0.004531491547822952,
0.16192513704299927,
-0.048957984894514084,
-0.04544058442115784,
0.07039003074169159,
-0.06382793188095093,
-0.0627007782459259,
-0.03911004588007927,
0.10758331418037415,
0.052777763456106186,
0.06386514753103256,
0.04641345515847206,
0.05791211873292923,
-0.04930087551474571,
-0.0994439497590065,
-0.08318908512592316,
0.09636474400758743,
-0.0032736051362007856,
0.07771036028862,
-0.0794738382101059,
-0.04545785114169121,
-0.096809022128582,
0.04476084187626839,
0.21090558171272278,
0.21976418793201447,
-0.06818646937608719,
0.059968240559101105,
0.1123657375574112,
-0.07010821253061295,
-0.23498030006885529,
-0.05446826294064522,
0.04934219270944595,
0.05869027227163315,
0.07326779514551163,
-0.12031575292348862,
0.08892861753702164,
0.07101278007030487,
-0.016284091398119926,
-0.03850346431136131,
-0.2525629699230194,
-0.14852234721183777,
0.12360083311796188,
0.0015897502889856696,
-0.046298686414957047,
-0.0863591805100441,
-0.057427193969488144,
-0.0732111856341362,
-0.03605235740542412,
0.07133384048938751,
-0.023929579183459282,
0.09606809169054031,
0.03570220619440079,
0.0603904202580452,
0.06688397377729416,
0.0019435436697676778,
0.13540199398994446,
0.0658237636089325,
0.035809848457574844,
-0.04375263676047325,
-0.0011074542999267578,
0.0819145143032074,
-0.08783595263957977,
0.163271963596344,
-0.08153671771287918,
0.03196614980697632,
-0.12310963124036789,
-0.03246426582336426,
-0.027479318901896477,
0.022943813353776932,
-0.040666479617357254,
-0.05253632739186287,
-0.03355874493718147,
0.03528785705566406,
0.07389968633651733,
-0.0014916923828423023,
0.0239577479660511,
-0.0838417187333107,
0.014782477170228958,
0.16536962985992432,
0.1523013859987259,
0.016070162877440453,
-0.1654682159423828,
0.02889167331159115,
0.015329807996749878,
0.05709310248494148,
-0.13415738940238953,
0.07423096150159836,
0.0831059068441391,
-0.007369383238255978,
0.13797292113304138,
0.01964542083442211,
-0.0752440094947815,
0.014567111618816853,
0.07271455228328705,
-0.05286102369427681,
-0.19305488467216492,
-0.037470974028110504,
0.004150160122662783,
-0.14496421813964844,
-0.046438440680503845,
0.16004952788352966,
-0.008933615870773792,
0.01343303918838501,
0.0446917749941349,
0.04079944267868996,
-0.03184916824102402,
0.13180580735206604,
-0.03150308132171631,
0.06424132734537125,
-0.06357351690530777,
0.052813570946455,
0.08756221830844879,
-0.08707885444164276,
0.011482476256787777,
0.12809228897094727,
-0.06765343248844147,
-0.10428902506828308,
-0.07040999829769135,
0.09601540118455887,
-0.10383503884077072,
0.04045088216662407,
-0.039992641657590866,
-0.06703091412782669,
0.01754385232925415,
0.013492838479578495,
0.0508844293653965,
0.0657326877117157,
-0.08276435732841492,
-0.028288409113883972,
-0.08742523193359375,
0.07731078565120697,
0.05910621955990791,
0.02281208522617817,
-0.03602832555770874,
0.07543433457612991,
-0.04500369727611542,
0.002955262316390872,
-0.020993102341890335,
-0.03872702270746231,
-0.05509868264198303,
-0.007011875510215759,
-0.047205980867147446,
-0.010323849506676197,
-0.10559351742267609,
-0.006501719821244478,
0.03500840812921524,
0.045824430882930756,
-0.014200101606547832,
-0.011579249054193497,
-0.05088558420538902,
-0.0731888934969902,
-0.05672658979892731,
0.109440378844738,
-0.13022945821285248,
0.009891525842249393,
0.03694486618041992,
-0.08049630373716354,
0.09977079182863235,
-0.02405601367354393,
-0.012953774072229862,
0.019540514796972275,
-0.02765580825507641,
-0.03664551302790642,
0.039849769324064255,
0.03950252756476402,
0.07218192517757416,
-0.08077980577945709,
0.009444190189242363,
-0.04264914244413376,
0.014111979864537716,
0.014151427894830704,
0.0414164736866951,
-0.09859074652194977,
0.02863042615354061,
-0.026974957436323166,
0.0046723950654268265,
-0.10890097916126251,
0.029542136937379837,
0.01574586145579815,
0.03484347462654114,
0.16293571889400482,
-0.044901348650455475,
0.07658356428146362,
-0.13056421279907227,
0.006796658504754305,
0.009722153656184673,
-0.03106883354485035,
0.07453359663486481,
-0.11865796893835068,
0.06094302237033844,
-0.044084977358579636,
0.04242125153541565,
-0.005445142742246389,
0.06089971587061882,
0.06062573939561844,
0.053288377821445465,
0.014434383250772953,
0.00999942421913147,
0.05470355227589607,
0.06499253213405609,
-0.019396139308810234,
-0.046076029539108276,
0.037010129541158676,
0.02443569339811802,
-0.021657228469848633,
0.07266350090503693,
0.06523378193378448,
0.014391424134373665,
0.09200750291347504,
0.0679040476679802,
-0.00848270021378994,
-0.12082133442163467,
0.02619783580303192,
-0.06450674682855606,
0.0660981610417366,
-0.034710295498371124,
0.05148857459425926,
0.1663479208946228,
-0.15311339497566223,
0.0995449647307396,
0.004826471675187349,
-0.055967506021261215,
-0.06997187435626984,
-0.14947780966758728,
-0.07768363505601883,
-0.06019517034292221,
-0.011616447940468788,
-0.12751910090446472,
-0.0059491172432899475,
-0.019221624359488487,
0.014148342423141003,
-0.0158664770424366,
0.1455281376838684,
-0.11719286441802979,
-0.1003357395529747,
0.08621039241552353,
-0.028665287420153618,
0.04961724579334259,
0.023586174473166466,
0.02932751178741455,
0.017655394971370697,
0.054103460162878036,
0.0643921047449112,
0.05330883711576462,
0.041274525225162506,
0.03435366228222847,
-0.09077885746955872,
-0.09259222447872162,
-0.013003414496779442,
0.005559728480875492,
-0.045670632272958755,
0.07855240255594254,
0.03807087987661362,
-0.06541797518730164,
-0.012414145283401012,
0.21138733625411987,
-0.08558333665132523,
-0.07813411206007004,
-0.16929692029953003,
0.1890384554862976,
0.04628850892186165,
0.03883248195052147,
-0.04359345883131027,
-0.09143572300672531,
-0.008046048693358898,
0.1377372443675995,
0.15866945683956146,
-0.0847984030842781,
0.018590794876217842,
0.021392200142145157,
0.01481650210916996,
-0.02339966408908367,
0.03089299239218235,
0.04651669040322304,
0.22085732221603394,
-0.047526340931653976,
0.09860113263130188,
-0.0016759522259235382,
-0.06242949515581131,
-0.059448953717947006,
0.07635559141635895,
0.019002646207809448,
0.04313761740922928,
-0.008212950080633163,
0.12116372585296631,
-0.035225819796323776,
-0.06220098212361336,
-0.02431228756904602,
-0.09034087508916855,
-0.11477695405483246,
-0.029026756063103676,
0.01638306863605976,
0.029933765530586243,
0.108830526471138,
0.02310253493487835,
-0.0494871512055397,
0.09436846524477005,
-0.029612885788083076,
-0.06680627912282944,
-0.033194445073604584,
0.018098851665854454,
-0.0238952599465847,
0.15757876634597778,
-0.0012807634193450212,
-0.038864895701408386,
0.12486040592193604,
0.012577471323311329,
-0.07788959890604019,
0.07360483705997467,
0.043308280408382416,
-0.07296936213970184,
0.13791289925575256,
0.08069080114364624,
-0.017919620499014854,
0.0909314677119255,
0.08800092339515686,
-0.20169687271118164,
0.04682808741927147,
-0.05792882665991783,
-0.008646942675113678,
-0.07282470911741257,
0.038326069712638855,
-0.07123720645904541,
0.12271493673324585,
0.17345967888832092,
-0.025003744289278984,
-0.01636962778866291,
0.006919191684573889,
0.015281381085515022,
0.023567067459225655,
0.0796223059296608,
-0.055704858154058456,
-0.10652481019496918,
0.00834620650857687,
0.01518675871193409,
0.005896477494388819,
-0.2824505567550659,
-0.10218604654073715,
0.01308612059801817,
-0.0009298617951571941,
-0.02245687134563923,
0.12492185086011887,
0.07775736600160599,
-0.010636381804943085,
-0.02950812317430973,
-0.22905081510543823,
0.032704684883356094,
0.10941357910633087,
-0.11730300635099411,
-0.09347262233495712
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-distilbert-hinglish-small
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model is straightforward once you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-distilbert-hinglish-small')
embeddings = model.encode(sentences)
print(embeddings)
```
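Since this checkpoint is intended for sentence-similarity tasks, a natural next step is to score sentence pairs. The snippet below is a minimal sketch, not part of the original card; it assumes a sentence-transformers release where `util.cos_sim` is available (older releases expose the same function as `util.pytorch_cos_sim`).
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('aditeyabaral/sentencetransformer-distilbert-hinglish-small')

# Encode two sentences into 768-dimensional embeddings (returned as torch tensors)
embeddings = model.encode(
    ["This is an example sentence", "Each sentence is converted"],
    convert_to_tensor=True,
)

# Cosine similarity between the two embeddings; values closer to 1 mean more similar
score = util.cos_sim(embeddings[0], embeddings[1])
print(score.item())
```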
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-distilbert-hinglish-small')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-distilbert-hinglish-small')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-distilbert-hinglish-small)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
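For reference, the listed parameters correspond roughly to the sentence-transformers training call sketched below. This is a hypothetical reconstruction, not the original training script: the base checkpoint, the training pairs, and their similarity labels are placeholders.
```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

# Assumed base checkpoint for illustration; the card does not state which model was used
model = SentenceTransformer('distilbert-base-uncased')

# Placeholder training pairs with similarity labels in [0, 1]
train_examples = [
    InputExample(texts=["sentence one", "sentence two"], label=0.9),
    InputExample(texts=["another sentence", "an unrelated sentence"], label=0.1),
]
train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=32)
train_loss = losses.CosineSimilarityLoss(model)

# Mirrors the fit() parameters listed above
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=10,
    scheduler="WarmupLinear",
    warmup_steps=100,
    optimizer_params={"lr": 2e-05},
    weight_decay=0.01,
    max_grad_norm=1,
)
```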
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: DistilBertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-distilbert-hinglish-small
|
[
"sentence-transformers",
"pytorch",
"distilbert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-distilbert-hinglish-small
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-distilbert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-distilbert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
44,
64,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #distilbert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-distilbert-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.0515366829931736,
0.11866600811481476,
-0.006596288178116083,
0.04974748566746712,
0.11960072815418243,
0.02220773883163929,
0.13080322742462158,
0.09255021810531616,
0.013335508294403553,
0.0956651121377945,
0.00914028100669384,
0.10340506583452225,
0.008849953301250935,
0.023853115737438202,
0.014239281415939331,
-0.2474725991487503,
0.014603250660002232,
-0.046834833920001984,
0.03312491253018379,
0.07835653424263,
0.10192057490348816,
-0.09051138162612915,
0.06543668359518051,
0.02970469743013382,
-0.04265329986810684,
-0.008130952715873718,
-0.022955844178795815,
-0.022352010011672974,
0.07886100560426712,
0.060248542577028275,
0.0460946224629879,
0.00824373122304678,
0.020307939499616623,
-0.19412289559841156,
0.012194313108921051,
0.059996772557497025,
-0.009206600487232208,
0.06350045651197433,
0.03519327566027641,
-0.037911273539066315,
0.16082192957401276,
-0.06951115280389786,
0.05156463012099266,
0.06644278019666672,
-0.10856010764837265,
-0.0720495656132698,
-0.06744381040334702,
-0.02149822562932968,
0.14196601510047913,
0.10441253334283829,
-0.060954391956329346,
0.11649754643440247,
-0.06128738820552826,
0.07635470479726791,
0.09993057698011398,
-0.2711262106895447,
-0.03238647058606148,
0.034274425357580185,
0.05245475471019745,
0.02742117829620838,
-0.10356240719556808,
0.006726422347128391,
-0.0103938402608037,
0.026371803134679794,
0.06053571403026581,
-0.05231392756104469,
0.0722629576921463,
-0.013163105584681034,
-0.10842632502317429,
-0.00590120255947113,
0.17354990541934967,
0.034189313650131226,
-0.032407090067863464,
-0.18429675698280334,
-0.055571720004081726,
0.02434351295232773,
-0.05596347525715828,
-0.038460247218608856,
0.02874123305082321,
0.038915328681468964,
0.008654511533677578,
-0.08408486098051071,
-0.11706490814685822,
-0.014006685465574265,
-0.06765732169151306,
0.02576187439262867,
-0.021059412509202957,
-0.04432687535881996,
-0.005391125567257404,
0.06707870215177536,
-0.08039285987615585,
-0.11081521958112717,
-0.0503869466483593,
-0.009999381378293037,
-0.1193365529179573,
-0.04404972121119499,
-0.05115211009979248,
-0.1302768588066101,
0.039959635585546494,
0.14420604705810547,
0.06906113773584366,
0.008438019081950188,
-0.014523414894938469,
0.0501888208091259,
0.025338344275951385,
0.19392676651477814,
-0.03214440122246742,
-0.08152834326028824,
-0.042423784732818604,
0.022468598559498787,
-0.001946216099895537,
-0.018752917647361755,
-0.03328566253185272,
0.00002283541289216373,
0.030914723873138428,
0.06967770308256149,
0.05270065367221832,
0.04665951058268547,
-0.07067586481571198,
-0.030301282182335854,
0.0679488405585289,
-0.11738871037960052,
0.03874717280268669,
0.016713498160243034,
-0.05046340078115463,
0.028400827199220657,
0.07937651127576828,
-0.015525455586612225,
-0.07264167815446854,
0.04246409237384796,
-0.10768531262874603,
-0.005585510283708572,
-0.06256213784217834,
-0.13349668681621552,
-0.0050676362589001656,
0.00593603728339076,
-0.03761807456612587,
-0.09973501414060593,
-0.13724583387374878,
-0.08926811069250107,
0.017278842628002167,
-0.039324160665273666,
0.008055418729782104,
-0.11685299128293991,
-0.004770202096551657,
0.016422012820839882,
-0.007147608324885368,
-0.06280888617038727,
-0.013294844888150692,
0.011245768517255783,
-0.03733396157622337,
0.04943615198135376,
0.04429823160171509,
0.047081079334020615,
-0.1238127052783966,
0.030978061258792877,
-0.12755319476127625,
0.15130428969860077,
-0.04742692410945892,
0.090786874294281,
-0.14928019046783447,
0.00024057381961029023,
0.016423173248767853,
0.051773134618997574,
0.013664544560015202,
0.14803284406661987,
-0.20709531009197235,
-0.06532566994428635,
0.11192558705806732,
-0.05396388843655586,
-0.10021601617336273,
0.10738646239042282,
-0.015807732939720154,
0.13116322457790375,
0.11025714129209518,
0.10394195467233658,
0.15517354011535645,
-0.04779107868671417,
-0.029535595327615738,
0.022287098690867424,
-0.040562257170677185,
0.10378013551235199,
0.03874463960528374,
-0.07069535553455353,
0.09790316969156265,
-0.004670036491006613,
-0.056815288960933685,
-0.004475451540201902,
-0.007258846890181303,
-0.06498865783214569,
0.018249263986945152,
-0.03681843355298042,
0.060711249709129333,
-0.02758525311946869,
0.00047153260675258934,
0.007908872328698635,
-0.09854696691036224,
0.07931813597679138,
0.07765152305364609,
-0.06182254105806351,
0.012810435146093369,
-0.09936429560184479,
0.03234199061989784,
-0.00032620810088701546,
0.013620122335851192,
-0.2031152993440628,
-0.12237664312124252,
0.01749080792069435,
-0.008617733605206013,
0.10000287741422653,
0.06627905368804932,
0.05602868273854256,
0.03096887469291687,
0.007978059351444244,
-0.020470544695854187,
0.03571489825844765,
-0.0244500283151865,
-0.09827256202697754,
-0.08953284472227097,
0.001992881763726473,
-0.028503959998488426,
0.08243943750858307,
-0.12500131130218506,
0.01389862410724163,
0.04359612241387367,
0.033990278840065,
0.051756225526332855,
-0.03555896133184433,
-0.00031610255246050656,
-0.018538421019911766,
-0.01282759103924036,
-0.018803218379616737,
0.04462200403213501,
0.007797227241098881,
-0.13798317313194275,
0.0736360177397728,
-0.1948769986629486,
-0.12136805057525635,
0.06871796399354935,
0.024572543799877167,
-0.05656551569700241,
-0.06051652505993843,
-0.018660984933376312,
0.0010924177477136254,
-0.027215540409088135,
-0.06542275846004486,
0.1920386254787445,
0.09651057422161102,
0.10623572766780853,
-0.027567150071263313,
-0.020645754411816597,
-0.042741160839796066,
-0.03229638561606407,
-0.038990575820207596,
0.10415954142808914,
-0.03412296622991562,
-0.12023118883371353,
0.04163217544555664,
0.08352716267108917,
-0.06325386464595795,
0.09703770279884338,
-0.003525740932673216,
-0.07572168856859207,
-0.054949209094047546,
0.047003988176584244,
0.05025581642985344,
-0.010614609345793724,
-0.10002535581588745,
0.010770564898848534,
0.06617330759763718,
0.013041778467595577,
0.008397947996854782,
-0.06789258122444153,
0.04869547858834267,
0.0547015555202961,
0.0031366560142487288,
0.08861584216356277,
0.03339516371488571,
0.0002972422807943076,
0.05527421459555626,
0.015806201845407486,
0.014603231102228165,
-0.05476117506623268,
-0.05013309419155121,
-0.10882597416639328,
0.17765776813030243,
-0.12088995426893234,
-0.22749818861484528,
-0.16858214139938354,
0.006476914510130882,
-0.05132761597633362,
0.021579429507255554,
0.08232889324426651,
-0.0651826560497284,
-0.08677203208208084,
-0.06619998812675476,
0.08003915101289749,
0.08432669192552567,
-0.03746723011136055,
0.0023268130607903004,
0.0312868095934391,
0.015212327241897583,
-0.12854760885238647,
-0.017008496448397636,
-0.004215491469949484,
-0.07363799214363098,
0.0035147990565747023,
0.004424326587468386,
0.07310367375612259,
0.11758273839950562,
0.0501069501042366,
-0.016300402581691742,
-0.0008359971106983721,
0.22012978792190552,
-0.09086496382951736,
0.03591971471905708,
0.1909574717283249,
0.01040365919470787,
0.06422970443964005,
0.08863437920808792,
0.03276008367538452,
-0.06933021545410156,
0.05351965129375458,
0.06127447262406349,
-0.018146255984902382,
-0.1472010314464569,
-0.1155695915222168,
-0.07318482547998428,
0.014839787036180496,
0.12864290177822113,
0.032638922333717346,
0.0012804929865524173,
0.054562631994485855,
-0.018047399818897247,
0.005290799308568239,
0.06993598490953445,
0.1083655059337616,
0.13544131815433502,
-0.01646878570318222,
0.1031072735786438,
-0.04321121796965599,
-0.05437132716178894,
0.06572026759386063,
-0.009476922452449799,
0.14453577995300293,
0.015475922264158726,
0.18594473600387573,
0.07312027364969254,
-0.02349456585943699,
-0.020581211894750595,
0.08598175644874573,
-0.02690776064991951,
-0.0006659919745288789,
-0.010370498523116112,
-0.09182015806436539,
-0.03158793970942497,
0.08566989749670029,
0.07938945293426514,
-0.023927947506308556,
-0.04290495440363884,
0.0801193043589592,
0.1151956170797348,
0.13901998102664948,
0.0940537378191948,
-0.24730880558490753,
-0.0634012371301651,
0.02128141187131405,
-0.07099765539169312,
-0.0649133175611496,
-0.00953133124858141,
0.03511137142777443,
-0.11712729930877686,
0.029282473027706146,
-0.018020112067461014,
0.09822861105203629,
-0.09600255638360977,
0.02426176331937313,
-0.04844663292169571,
0.03489050641655922,
-0.00306483986787498,
0.0710567906498909,
-0.21902097761631012,
0.08695832639932632,
0.03755568712949753,
0.049961481243371964,
-0.04916546121239662,
0.03395926207304001,
0.06114562973380089,
0.014825135469436646,
0.16927307844161987,
-0.02557886764407158,
0.03692593052983284,
-0.02278408594429493,
-0.07803167402744293,
-0.010102184489369392,
0.053239606320858,
-0.12036707252264023,
0.09337425231933594,
-0.047026362270116806,
-0.02202032506465912,
-0.027324479073286057,
0.0013681837590411305,
-0.03717746213078499,
-0.15581414103507996,
0.020571406930685043,
0.02264152094721794,
0.021978307515382767,
-0.018750905990600586,
-0.007163387723267078,
0.0047360984608531,
0.21769003570079803,
-0.11623603850603104,
-0.06723305583000183,
-0.1281837522983551,
0.0023766360245645046,
0.0964154452085495,
-0.09816465526819229,
0.011899095959961414,
-0.0035145480651408434,
0.14635069668293,
-0.04391014203429222,
-0.05009210482239723,
0.06425903737545013,
-0.062138304114341736,
-0.06674368679523468,
-0.03948085382580757,
0.10304950177669525,
0.05515245348215103,
0.056163445115089417,
0.04313388839364052,
0.0552770271897316,
-0.04723088815808296,
-0.10168702900409698,
-0.08232150971889496,
0.10181325674057007,
-0.00637424923479557,
0.08385413885116577,
-0.0840178057551384,
-0.056939538568258286,
-0.10254967957735062,
0.046418190002441406,
0.19380585849285126,
0.20872564613819122,
-0.06884066760540009,
0.05931032449007034,
0.10881736874580383,
-0.07585255056619644,
-0.2267378568649292,
-0.05628907307982445,
0.04530568793416023,
0.04748491197824478,
0.07323240488767624,
-0.12075257301330566,
0.08742653578519821,
0.06269743293523788,
-0.013354603201150894,
-0.030641166493296623,
-0.2598472833633423,
-0.14311537146568298,
0.12250945717096329,
0.020169774070382118,
-0.038858503103256226,
-0.09051228314638138,
-0.06243910640478134,
-0.06104700267314911,
-0.06259017437696457,
0.06879632920026779,
-0.021075677126646042,
0.09356866776943207,
0.028619442135095596,
0.06719657778739929,
0.06812863051891327,
0.0013372321845963597,
0.13687419891357422,
0.07301744818687439,
0.03970707580447197,
-0.040795065462589264,
-0.0023648561909794807,
0.08364765346050262,
-0.08562593907117844,
0.15994423627853394,
-0.06983894109725952,
0.03611509129405022,
-0.12852859497070312,
-0.03058011457324028,
-0.029837515205144882,
0.02486809343099594,
-0.045497436076402664,
-0.05006963014602661,
-0.024119701236486435,
0.038473911583423615,
0.07550349831581116,
0.0017124296864494681,
0.027654938399791718,
-0.07843705266714096,
0.017545564100146294,
0.1650729477405548,
0.14173239469528198,
0.03580069541931152,
-0.18125513195991516,
0.02066313475370407,
0.014808663167059422,
0.059679239988327026,
-0.12616080045700073,
0.07510237395763397,
0.08898505568504333,
0.0015983711928129196,
0.14313249289989471,
0.018878674134612083,
-0.0747033879160881,
0.013261057436466217,
0.0698365718126297,
-0.06333703547716141,
-0.18734443187713623,
-0.03408372029662132,
-0.009677342139184475,
-0.13405770063400269,
-0.03289305418729782,
0.15876318514347076,
-0.009063014760613441,
0.01203126460313797,
0.04195781424641609,
0.04144541174173355,
-0.03157304972410202,
0.13109475374221802,
-0.030762873589992523,
0.06061723828315735,
-0.06348858028650284,
0.06284395605325699,
0.08582104742527008,
-0.07591576874256134,
0.007912794128060341,
0.12114468216896057,
-0.0721375122666359,
-0.10201383382081985,
-0.07759906351566315,
0.0932651236653328,
-0.10685268044471741,
0.033838577568531036,
-0.04503658786416054,
-0.061439525336027145,
0.016580455005168915,
0.01578119955956936,
0.053537268191576004,
0.06437574326992035,
-0.08594709634780884,
-0.030480708926916122,
-0.08021411299705505,
0.07679448276758194,
0.07150299102067947,
0.02058039978146553,
-0.040380388498306274,
0.07649743556976318,
-0.03662806749343872,
0.008710023947060108,
-0.018390612676739693,
-0.04555843770503998,
-0.05590733885765076,
-0.0026566749438643456,
-0.05399114266037941,
-0.013037953525781631,
-0.10259812325239182,
-0.00414239289239049,
0.03430414944887161,
0.0420418381690979,
-0.012792463414371014,
-0.006351537071168423,
-0.051396097987890244,
-0.07676640152931213,
-0.04409217834472656,
0.10586821287870407,
-0.14142131805419922,
0.0023756269365549088,
0.03565637394785881,
-0.08423903584480286,
0.09436754882335663,
-0.017743336036801338,
-0.013971901498734951,
0.030816523358225822,
-0.01896558329463005,
-0.03048778511583805,
0.03471871837973595,
0.037883248180150986,
0.07237241417169571,
-0.08307532221078873,
0.009346836246550083,
-0.04584672302007675,
0.015439247712492943,
0.014661526307463646,
0.05651719868183136,
-0.10537692904472351,
0.024658603593707085,
-0.026203781366348267,
-0.0005707234959118068,
-0.10648427158594131,
0.033390771597623825,
0.01727019064128399,
0.043177343904972076,
0.17048954963684082,
-0.04360058903694153,
0.07517474889755249,
-0.12863993644714355,
0.0034376061521470547,
0.01451747678220272,
-0.02786795236170292,
0.07023291289806366,
-0.11449778825044632,
0.05722140148282051,
-0.04629897326231003,
0.033845480531454086,
-0.0013971009757369757,
0.06673228740692139,
0.05596444755792618,
0.04931845888495445,
-0.010957867838442326,
0.004875611048191786,
0.04688391461968422,
0.055112261325120926,
-0.008205563761293888,
-0.04154649004340172,
0.03454208746552467,
0.01631253957748413,
-0.030836746096611023,
0.07877548784017563,
0.06826332956552505,
0.012157941237092018,
0.09220312535762787,
0.05562588572502136,
-0.009022966958582401,
-0.14023220539093018,
0.02282777428627014,
-0.06121077761054039,
0.07056600600481033,
-0.0362112857401371,
0.053882237523794174,
0.1570797711610794,
-0.15402154624462128,
0.1080617755651474,
0.011004569008946419,
-0.056669265031814575,
-0.0699586346745491,
-0.15395769476890564,
-0.06994781643152237,
-0.058197878301143646,
-0.009034647606313229,
-0.1251956969499588,
-0.0034026443026959896,
-0.031247353181242943,
0.006815245375037193,
-0.017803391441702843,
0.13839435577392578,
-0.11993388086557388,
-0.09830091148614883,
0.08272141963243484,
-0.02729884907603264,
0.051556557416915894,
0.02219795435667038,
0.0352339893579483,
0.010036138817667961,
0.055771660059690475,
0.0619753934442997,
0.053564053028821945,
0.03704754635691643,
0.031376007944345474,
-0.08543398976325989,
-0.0894874632358551,
-0.009803549386560917,
0.007699674926698208,
-0.04638650268316269,
0.08677230775356293,
0.044050876051187515,
-0.05962984636425972,
-0.01508188433945179,
0.21973411738872528,
-0.08790311962366104,
-0.07146235555410385,
-0.16822081804275513,
0.20125918090343475,
0.047528062015771866,
0.03407474234700203,
-0.0315132774412632,
-0.09502489864826202,
-0.005659253802150488,
0.13813598453998566,
0.14569246768951416,
-0.08175171911716461,
0.02264772169291973,
0.023344727233052254,
0.014693322591483593,
-0.014161838218569756,
0.03030199185013771,
0.05337221547961235,
0.21380597352981567,
-0.043843694031238556,
0.09242994338274002,
-0.01270107552409172,
-0.06146779656410217,
-0.05599001422524452,
0.07047515362501144,
0.012198050506412983,
0.03575747832655907,
-0.018512822687625885,
0.12103350460529327,
-0.03335782513022423,
-0.07614529877901077,
-0.013013186864554882,
-0.09101972728967667,
-0.11458764970302582,
-0.03102567419409752,
0.01384491752833128,
0.024798739701509476,
0.10543253272771835,
0.023766590282320976,
-0.04744845628738403,
0.12022384256124496,
-0.027169600129127502,
-0.062465373426675797,
-0.03484802320599556,
0.023258019238710403,
-0.03248760849237442,
0.16221411526203156,
0.0003353543288540095,
-0.03637558966875076,
0.12106844782829285,
0.015653766691684723,
-0.0725463405251503,
0.0787738487124443,
0.04242309555411339,
-0.07769075036048889,
0.12961570918560028,
0.08008687198162079,
-0.013973704539239407,
0.09497789293527603,
0.07967343181371689,
-0.19053959846496582,
0.045124221593141556,
-0.043147098273038864,
-0.01049027405679226,
-0.07245718687772751,
0.04215720295906067,
-0.07508179545402527,
0.12084834277629852,
0.17909060418605804,
-0.019685296341776848,
-0.013425150886178017,
0.0007080037030391395,
0.014258173294365406,
0.020985295996069908,
0.07981828600168228,
-0.05442317947745323,
-0.09759190678596497,
0.00877254270017147,
0.008903831243515015,
0.010350458323955536,
-0.276169091463089,
-0.1020388975739479,
0.014886582270264626,
-0.011359727941453457,
-0.02274061180651188,
0.12612661719322205,
0.07302337139844894,
-0.01049651950597763,
-0.02979194186627865,
-0.218644380569458,
0.03036980889737606,
0.11501974612474442,
-0.11852820962667465,
-0.08762335032224655
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-indic-bert
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model is straightforward once you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-indic-bert')
embeddings = model.encode(sentences)
print(embeddings)
```
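The description lists semantic search as a target use case. As an illustration (not part of the original card), the embeddings can be ranked with `sentence_transformers.util.semantic_search`; the corpus and query below are made-up examples.
```python
from sentence_transformers import SentenceTransformer, util

model = SentenceTransformer('aditeyabaral/sentencetransformer-indic-bert')

# Made-up corpus and query for illustration
corpus = ["This is an example sentence", "Each sentence is converted", "A completely different topic"]
query = "An example sentence"

corpus_embeddings = model.encode(corpus, convert_to_tensor=True)
query_embedding = model.encode(query, convert_to_tensor=True)

# For each query, returns the top_k corpus entries ranked by cosine similarity
hits = util.semantic_search(query_embedding, corpus_embeddings, top_k=2)
for hit in hits[0]:
    print(corpus[hit["corpus_id"]], hit["score"])
```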
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-indic-bert')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-indic-bert')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
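Clustering is the other use case mentioned in the description. The sketch below groups the sentence embeddings with scikit-learn's KMeans; it is illustrative only, and the sentences and number of clusters are arbitrary choices rather than part of the original card.
```python
from sentence_transformers import SentenceTransformer
from sklearn.cluster import KMeans

model = SentenceTransformer('aditeyabaral/sentencetransformer-indic-bert')

# Arbitrary sentences for illustration
sentences = [
    "This is an example sentence",
    "Each sentence is converted",
    "A completely different topic",
    "Yet another unrelated statement",
]
embeddings = model.encode(sentences)  # numpy array of shape (len(sentences), 768)

# Group the sentence embeddings into two clusters
kmeans = KMeans(n_clusters=2, random_state=0, n_init=10)
labels = kmeans.fit_predict(embeddings)
for sentence, label in zip(sentences, labels):
    print(label, sentence)
```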
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-indic-bert)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: AlbertModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-indic-bert
|
[
"sentence-transformers",
"pytorch",
"albert",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #albert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-indic-bert
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-indic-bert\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #albert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-indic-bert\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
43,
59,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #albert #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-indic-bert\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.037762876600027084,
0.12158206850290298,
-0.006772677414119244,
0.0491899810731411,
0.10764706879854202,
0.024814488366246223,
0.13785827159881592,
0.0924442857503891,
0.01367173995822668,
0.08007838577032089,
0.010725298896431923,
0.1030655950307846,
0.005154451820999384,
0.0047795879654586315,
0.02701515518128872,
-0.2798440456390381,
0.024739811196923256,
-0.036659352481365204,
0.027868863195180893,
0.0679108276963234,
0.11597888171672821,
-0.07122227549552917,
0.05870179086923599,
0.020862117409706116,
-0.039203643798828125,
0.012131113559007645,
-0.02752729132771492,
-0.03502687066793442,
0.0911879912018776,
0.06461573392152786,
0.04736578091979027,
0.0065796771086752415,
0.006728977896273136,
-0.20339827239513397,
0.012198027223348618,
0.06567154824733734,
-0.008493299596011639,
0.06195705384016037,
0.03572636842727661,
-0.04062607139348984,
0.14898362755775452,
-0.08307674527168274,
0.07095885276794434,
0.050304148346185684,
-0.12539516389369965,
-0.07679615914821625,
-0.04620717838406563,
-0.009391717612743378,
0.13868077099323273,
0.09351418912410736,
-0.06315496563911438,
0.11284686625003815,
-0.042683716863393784,
0.07870089262723923,
0.10906758159399033,
-0.27973780035972595,
-0.03036423958837986,
0.013416963629424572,
0.06935016065835953,
0.014135248959064484,
-0.10314879566431046,
0.019451739266514778,
-0.020202921703457832,
0.03504781052470207,
0.07176920771598816,
-0.044332534074783325,
0.06892113387584686,
-0.0026364456862211227,
-0.10908956080675125,
0.005351184401661158,
0.1619081348180771,
0.035004809498786926,
-0.017336146906018257,
-0.20014919340610504,
-0.07406467199325562,
0.0703350231051445,
-0.05222037807106972,
-0.03524163365364075,
0.03141944482922554,
0.04548263922333717,
-0.007544476073235273,
-0.09217612445354462,
-0.09685400128364563,
-0.005996096413582563,
-0.062121473252773285,
0.015972226858139038,
-0.013680540956556797,
-0.057318493723869324,
0.009567530825734138,
0.06416130065917969,
-0.07864152640104294,
-0.1162761002779007,
-0.018746959045529366,
-0.02760239876806736,
-0.12077482044696808,
-0.03407438099384308,
-0.06712076812982559,
-0.08628366142511368,
0.024796517565846443,
0.14002983272075653,
0.08770755678415298,
0.011978136375546455,
-0.014760304242372513,
0.05491841584444046,
0.022537438198924065,
0.19324475526809692,
-0.05374744161963463,
-0.07976718991994858,
-0.05002027377486229,
0.03452305123209953,
-0.003690088400617242,
-0.027923578396439552,
-0.04278208687901497,
-0.00370399234816432,
0.04046308994293213,
0.05812976881861687,
0.054851870983839035,
0.05378513038158417,
-0.05992143601179123,
-0.04699108004570007,
0.05337035655975342,
-0.12266828119754791,
0.039822693914175034,
0.0038119752425700426,
-0.051678117364645004,
0.022319059818983078,
0.08129065483808517,
-0.00867349375039339,
-0.07399721443653107,
0.02241506613790989,
-0.10521787405014038,
-0.007765373680740595,
-0.05621585622429848,
-0.131046324968338,
-0.007286432199180126,
-0.0205055084079504,
-0.034960754215717316,
-0.09653403609991074,
-0.1236681193113327,
-0.07530522346496582,
0.033022236078977585,
-0.04520401731133461,
-0.013925896026194096,
-0.12092740833759308,
-0.00738985650241375,
0.007508036680519581,
-0.0025672561023384333,
-0.06953459233045578,
-0.004391842056065798,
0.009591876529157162,
-0.05415220558643341,
0.05826491490006447,
0.0588306300342083,
0.04256284609436989,
-0.1156010776758194,
0.028476933017373085,
-0.12835630774497986,
0.16201458871364594,
-0.03870312124490738,
0.05824742093682289,
-0.13643398880958557,
0.030566105619072914,
0.029169343411922455,
0.06792206317186356,
0.00707409018650651,
0.1478462666273117,
-0.22344927489757538,
-0.08204546570777893,
0.12359663099050522,
-0.03619792312383652,
-0.08710604161024094,
0.1093742772936821,
-0.035784762352705,
0.11573237925767899,
0.11769991368055344,
0.12410352379083633,
0.0979178324341774,
-0.05824108421802521,
-0.011849476955831051,
0.02396128512918949,
-0.03998251259326935,
0.14699704945087433,
0.043701644986867905,
-0.06591090559959412,
0.09000390768051147,
-0.005229718051850796,
-0.04670385643839836,
0.004207770340144634,
-0.00272601330652833,
-0.0553521066904068,
0.01922735571861267,
-0.033478230237960815,
0.06675226986408234,
-0.03517864644527435,
-0.0012660800712183118,
0.012200850062072277,
-0.11514847725629807,
0.10378426313400269,
0.060522206127643585,
-0.07004803419113159,
0.018048260360956192,
-0.09031036496162415,
0.01616225391626358,
-0.00854215957224369,
0.014681545086205006,
-0.20469225943088531,
-0.1149909570813179,
0.02541673555970192,
0.0029733332339674234,
0.11175017803907394,
0.05011415481567383,
0.05766891688108444,
0.038576889783144,
-0.019272727891802788,
-0.018537579104304314,
0.03651343658566475,
-0.000994631089270115,
-0.10024869441986084,
-0.1164608895778656,
-0.0018046160694211721,
-0.03684055805206299,
0.09720763564109802,
-0.114267498254776,
0.019381564110517502,
0.006626899819821119,
0.08002061396837234,
0.049189019948244095,
-0.02258877269923687,
-0.009425344876945019,
-0.025770410895347595,
-0.008639717474579811,
-0.02848379872739315,
0.053408049046993256,
0.01284523494541645,
-0.15132229030132294,
0.11051590740680695,
-0.20448601245880127,
-0.14461351931095123,
0.07412123680114746,
-0.002269477816298604,
-0.056623149663209915,
-0.050171107053756714,
-0.020251242443919182,
0.0003986798401456326,
-0.03024408034980297,
-0.07159215956926346,
0.22070389986038208,
0.0894819051027298,
0.11384406685829163,
-0.03348832204937935,
-0.02804483287036419,
-0.05245267227292061,
-0.040324222296476364,
-0.05130048096179962,
0.10840136557817459,
-0.03820011392235756,
-0.13996019959449768,
0.04866401106119156,
0.08756644278764725,
-0.04863717779517174,
0.10479509830474854,
-0.008896403014659882,
-0.07411446422338486,
-0.06408608704805374,
0.012712335214018822,
0.03942185267806053,
-0.005673592910170555,
-0.06913364678621292,
0.00916532427072525,
0.06014946103096008,
0.011476260609924793,
0.016605840995907784,
-0.06393597275018692,
0.0499020554125309,
0.06341781467199326,
0.010716920718550682,
0.10684125125408173,
0.025519011542201042,
0.0037482173647731543,
0.06564947217702866,
0.013422532007098198,
0.005028705112636089,
-0.05105239152908325,
-0.045008234679698944,
-0.10468356311321259,
0.16716046631336212,
-0.12858910858631134,
-0.21813185513019562,
-0.1566287875175476,
-0.0057847085408866405,
-0.048350874334573746,
0.027850186452269554,
0.08048173785209656,
-0.059231240302324295,
-0.06946564465761185,
-0.06915312260389328,
0.07643820345401764,
0.08876974880695343,
-0.045122187584638596,
0.01056002825498581,
0.022080274298787117,
0.01855386234819889,
-0.1197652593255043,
-0.017225245013833046,
0.000522964692208916,
-0.06705594062805176,
-0.008470285683870316,
-0.024375909939408302,
0.06326300650835037,
0.122780941426754,
0.06149089336395264,
-0.014540287666022778,
-0.007254842668771744,
0.2318527102470398,
-0.09354455769062042,
0.05879819765686989,
0.15448732674121857,
-0.016962839290499687,
0.06593450158834457,
0.09604829549789429,
0.019880147650837898,
-0.06065690517425537,
0.042216766625642776,
0.07616841793060303,
-0.0083566103130579,
-0.14200955629348755,
-0.1119580790400505,
-0.06287068873643875,
0.0012224914971739054,
0.1299397498369217,
0.03291613236069679,
0.03450837358832359,
0.03444722294807434,
-0.023997755721211433,
0.018266381695866585,
0.09799560904502869,
0.11214818060398102,
0.1268112063407898,
-0.028121396899223328,
0.10350710153579712,
-0.03990371525287628,
-0.07834213972091675,
0.05389223247766495,
-0.0059679667465388775,
0.15941943228244781,
0.03798757120966911,
0.16167804598808289,
0.07220405340194702,
-0.06084800139069557,
-0.01313281524926424,
0.07622328400611877,
-0.03418274223804474,
0.012645622715353966,
-0.03964655101299286,
-0.09104190021753311,
-0.0009802310960367322,
0.08771820366382599,
0.10120663791894913,
-0.0370805598795414,
-0.04434977471828461,
0.05952076241374016,
0.12742483615875244,
0.13432757556438446,
0.08855338394641876,
-0.23085980117321014,
-0.037788622081279755,
0.04111229628324509,
-0.08027913421392441,
-0.06960898637771606,
-0.005091243889182806,
0.033087391406297684,
-0.10359062254428864,
0.03864653781056404,
-0.015815163031220436,
0.1015300303697586,
-0.09253817051649094,
0.03330698981881142,
-0.052829477936029434,
0.02973843738436699,
0.004181166645139456,
0.06749926507472992,
-0.21618936955928802,
0.10353755950927734,
0.039462536573410034,
0.044717833399772644,
-0.053134504705667496,
0.02484354004263878,
0.07268736511468887,
0.022844016551971436,
0.17935095727443695,
-0.030453240498900414,
-0.006220476236194372,
-0.014219194650650024,
-0.07287534326314926,
-0.009395165368914604,
0.04808685928583145,
-0.12867127358913422,
0.08967739343643188,
-0.05233199521899223,
-0.03366920351982117,
-0.013824131339788437,
0.04555699974298477,
-0.044204361736774445,
-0.17911309003829956,
0.015756873413920403,
0.01413247361779213,
-0.0038454949390143156,
-0.022037971764802933,
-0.013584751635789871,
0.006452045403420925,
0.1886657178401947,
-0.1011883020401001,
-0.056151773780584335,
-0.12019040435552597,
-0.019061295315623283,
0.10960129648447037,
-0.09001903235912323,
0.003063047770410776,
0.0026218954008072615,
0.15789656341075897,
-0.062263499945402145,
-0.06543156504631042,
0.07486133277416229,
-0.0494367778301239,
-0.05890863016247749,
-0.05120572820305824,
0.1053038239479065,
0.052574243396520615,
0.06181550398468971,
0.042982470244169235,
0.08247347176074982,
-0.025529315695166588,
-0.08333975076675415,
-0.06362605094909668,
0.11156214028596878,
-0.014069066382944584,
0.05927648767828941,
-0.13489417731761932,
-0.031775474548339844,
-0.11543764919042587,
0.053964246064424515,
0.19778573513031006,
0.23066405951976776,
-0.07248736917972565,
0.06489912420511246,
0.14085087180137634,
-0.10287301987409592,
-0.2239394634962082,
-0.07586126774549484,
0.026241851970553398,
0.046349480748176575,
0.07395023107528687,
-0.13444840908050537,
0.06698060035705566,
0.05059847608208656,
-0.008507749065756798,
-0.06638117134571075,
-0.2352728545665741,
-0.14587971568107605,
0.13862955570220947,
0.004493709187954664,
-0.044226277619600296,
-0.09437168389558792,
-0.058568961918354034,
-0.07150667160749435,
-0.02838362753391266,
0.08505215495824814,
-0.04356292262673378,
0.10660296678543091,
0.050813037902116776,
0.021662520244717598,
0.06092280149459839,
0.011051816865801811,
0.14697523415088654,
0.06991028785705566,
0.037329066544771194,
-0.032082900404930115,
-0.03451362997293472,
0.05691609904170036,
-0.08563492447137833,
0.13258925080299377,
-0.061305511742830276,
0.03150889277458191,
-0.13159222900867462,
-0.034965384751558304,
-0.03714781999588013,
0.02697162702679634,
-0.05077873542904854,
-0.05056506395339966,
-0.00830273050814867,
0.05062378942966461,
0.09854263812303543,
-0.00785195454955101,
0.01824628934264183,
-0.08384504169225693,
0.037275541573762894,
0.13914066553115845,
0.11922243237495422,
0.048648037016391754,
-0.16166478395462036,
0.02053374983370304,
0.007485372480005026,
0.058506131172180176,
-0.12027993053197861,
0.07383579760789871,
0.08044816553592682,
-0.011679275892674923,
0.16161908209323883,
0.014188671484589577,
-0.0849703848361969,
-0.021617846563458443,
0.04548010230064392,
-0.0816677138209343,
-0.1582600176334381,
-0.05278387293219566,
0.0002556403633207083,
-0.09977409988641739,
-0.06399687379598618,
0.15433061122894287,
-0.010445162653923035,
0.0006984920473769307,
0.0434596985578537,
0.03988596796989441,
-0.02457764372229576,
0.10557565093040466,
-0.020095262676477432,
0.05573193356394768,
-0.048496320843696594,
0.08913135528564453,
0.09185024350881577,
-0.10569950193166733,
0.03172360733151436,
0.11638973653316498,
-0.06396003067493439,
-0.08959335088729858,
-0.07095857709646225,
0.1541001945734024,
-0.07227789610624313,
0.033502787351608276,
-0.058352917432785034,
-0.0679301768541336,
0.022682391107082367,
0.044946905225515366,
0.050342582166194916,
0.05779648572206497,
-0.09298787266016006,
-0.021947236731648445,
-0.08500101417303085,
0.08639281988143921,
0.09368069469928741,
0.015941642224788666,
-0.02259807474911213,
0.06484879553318024,
-0.021405775099992752,
0.0054407245479524136,
-0.02785523049533367,
-0.04190020635724068,
-0.06659755110740662,
-0.011416582390666008,
-0.03822411224246025,
-0.015208195894956589,
-0.08899439871311188,
-0.007571739610284567,
0.027531931176781654,
0.042043495923280716,
-0.000994606059975922,
-0.017460234463214874,
-0.05089539662003517,
-0.07394035905599594,
-0.047189053148031235,
0.10409969091415405,
-0.13337339460849762,
-0.020825104787945747,
0.04673830792307854,
-0.1010638102889061,
0.0807105228304863,
-0.02245260216295719,
-0.027934731915593147,
0.03376767039299011,
-0.01551607996225357,
-0.044116996228694916,
0.025791047140955925,
0.03504176065325737,
0.06523105502128601,
-0.11332522332668304,
0.016308344900608063,
-0.06120634078979492,
0.029624376446008682,
0.005463337525725365,
0.03166349604725838,
-0.09557054936885834,
0.007939418777823448,
-0.008533171378076077,
-0.031161267310380936,
-0.10087262839078903,
0.02548080123960972,
0.031820692121982574,
0.0400581993162632,
0.15871572494506836,
-0.05361872911453247,
0.08030959218740463,
-0.12914328277111053,
0.011186927556991577,
0.007992248982191086,
-0.05476396158337593,
0.10966142266988754,
-0.12304117530584335,
0.05355391278862953,
-0.04801642522215843,
0.06453121453523636,
-0.014391536824405193,
0.060553792864084244,
0.06986355781555176,
0.037490036338567734,
-0.017774034291505814,
0.025455044582486153,
0.05497584864497185,
0.04958818107843399,
-0.007065849844366312,
-0.06160702183842659,
0.029985077679157257,
0.014546428807079792,
-0.006796653848141432,
0.045851487666368484,
0.09134677052497864,
0.041768159717321396,
0.09148009121417999,
0.07848881185054779,
-0.00479641929268837,
-0.09788288176059723,
0.02596021071076393,
-0.025098487734794617,
0.07154905050992966,
-0.037020567804574966,
0.06251654773950577,
0.14462056756019592,
-0.1622430682182312,
0.12343304604291916,
0.004052603617310524,
-0.062053702771663666,
-0.0807838886976242,
-0.12507787346839905,
-0.07155110687017441,
-0.025293147191405296,
-0.01756623201072216,
-0.12149044126272202,
-0.008406770415604115,
-0.014349277131259441,
0.005581531673669815,
-0.007852821610867977,
0.1338537186384201,
-0.1256391853094101,
-0.09216582775115967,
0.1004306823015213,
-0.020900510251522064,
0.04753853380680084,
0.0016843650955706835,
0.028389988467097282,
0.013241108506917953,
0.08051147311925888,
0.036897435784339905,
0.04455789178609848,
0.037248801440000534,
0.027312563732266426,
-0.0863395631313324,
-0.09008687734603882,
-0.0005292418645694852,
-0.01616993173956871,
-0.04903583228588104,
0.07643485069274902,
0.03074284829199314,
-0.07379239797592163,
-0.01119039487093687,
0.2312036007642746,
-0.09612616896629333,
-0.09950512647628784,
-0.18041281402111053,
0.1864146739244461,
0.03708551079034805,
0.034988220781087875,
-0.024950796738266945,
-0.09155431389808655,
-0.02500721625983715,
0.1421542465686798,
0.15465593338012695,
-0.08585456758737564,
0.015867803245782852,
0.03773948550224304,
0.02274841070175171,
0.004610505886375904,
0.016396179795265198,
0.052581820636987686,
0.18856528401374817,
-0.04977530613541603,
0.08408419787883759,
-0.01619342528283596,
-0.06477183848619461,
-0.0657038539648056,
0.10777086019515991,
0.03190649673342705,
0.03360047563910484,
-0.018719684332609177,
0.10907690227031708,
-0.03523728623986244,
-0.09503443539142609,
-0.041681502014398575,
-0.09752225130796432,
-0.12008173018693924,
-0.03417227789759636,
0.035717256367206573,
0.02372773364186287,
0.10535068809986115,
0.04038481041789055,
-0.0354088693857193,
0.13082431256771088,
-0.010794345289468765,
-0.049747053533792496,
-0.015232470817863941,
0.025105956941843033,
-0.03633724898099899,
0.1593831479549408,
0.002643952611833811,
-0.02577901817858219,
0.12400243431329727,
0.010827030055224895,
-0.057312604039907455,
0.07683119922876358,
0.046030037105083466,
-0.06861517578363419,
0.11911375820636749,
0.08378361165523529,
-0.015813712030649185,
0.07494350522756577,
0.0780726969242096,
-0.19393010437488556,
0.06395600736141205,
-0.0582832433283329,
-0.04211233928799629,
-0.05871610343456268,
0.06235721334815025,
-0.07814972847700119,
0.12091609090566635,
0.1801399439573288,
-0.01643386296927929,
-0.014495974406599998,
-0.0046935477294027805,
-0.006866746582090855,
0.0331249013543129,
0.054771341383457184,
-0.05357080325484276,
-0.08569430559873581,
0.003764294320717454,
-0.003629543585702777,
0.020510269328951836,
-0.2830742299556732,
-0.11690398305654526,
0.018954899162054062,
-0.023022491484880447,
-0.025384003296494484,
0.12672455608844757,
0.0855216383934021,
-0.005705162882804871,
-0.03397196903824806,
-0.2168988138437271,
0.03981568291783333,
0.10329856723546982,
-0.12104617804288864,
-0.07767164707183838
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-roberta-base
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-roberta-base')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-roberta-base')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-roberta-base')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-roberta-base)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit() method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
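For readers who want to reproduce a comparable setup, the snippet below is a minimal sketch of a `fit()` call wired up with these hyperparameters. The base checkpoint and the two labelled sentence pairs are hypothetical placeholders, not the data this model was actually trained on.
```python
from torch.utils.data import DataLoader
from sentence_transformers import SentenceTransformer, InputExample, losses

# Hypothetical starting checkpoint and training pairs (labels are cosine targets in [0, 1])
model = SentenceTransformer("roberta-base")
train_examples = [
    InputExample(texts=["This is an example sentence", "Each sentence is converted"], label=0.8),
    InputExample(texts=["This is an example sentence", "An unrelated statement"], label=0.1),
]

train_dataloader = DataLoader(train_examples, shuffle=True, batch_size=16)
train_loss = losses.CosineSimilarityLoss(model)

# Mirrors the parameters listed above: 10 epochs, WarmupLinear scheduling with
# 100 warmup steps, AdamW at lr 2e-5, and weight decay 0.01
model.fit(
    train_objectives=[(train_dataloader, train_loss)],
    epochs=10,
    warmup_steps=100,
    optimizer_params={"lr": 2e-5},
    weight_decay=0.01,
)
```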
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
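As a quick sanity check (an illustrative addition, not part of the original card), the modules above can be inspected after loading; the printed values are expected to match the configuration shown.
```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer('aditeyabaral/sentencetransformer-roberta-base')
print(model.max_seq_length)             # expected to report 512, per the Transformer module above
print(model[1].get_pooling_mode_str())  # expected to report 'mean', per the Pooling module above
```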
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-roberta-base
|
[
"sentence-transformers",
"pytorch",
"roberta",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-roberta-base
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
43,
59,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.031243465840816498,
0.1215367466211319,
-0.007021860219538212,
0.042639944702386856,
0.10780841112136841,
0.02738785371184349,
0.1319824755191803,
0.09512674808502197,
-0.01461021974682808,
0.08207300305366516,
0.009940502233803272,
0.10957707464694977,
0.006555213127285242,
0.006037251092493534,
0.03240814432501793,
-0.27760887145996094,
0.029745863750576973,
-0.04120730236172676,
0.022276893258094788,
0.07115739583969116,
0.11635943502187729,
-0.06973142176866531,
0.05416020378470421,
0.017896201461553574,
-0.0482633076608181,
0.014655740931630135,
-0.02825193852186203,
-0.029519161209464073,
0.09029091894626617,
0.05537542700767517,
0.048303086310625076,
0.007559970952570438,
0.009205866605043411,
-0.2049872875213623,
0.015003585256636143,
0.07054190337657928,
-0.013283791020512581,
0.06260477006435394,
0.024799605831503868,
-0.05113844573497772,
0.16764375567436218,
-0.0797407403588295,
0.06708253920078278,
0.05141361057758331,
-0.12251149863004684,
-0.07895313203334808,
-0.049830563366413116,
-0.003256602445617318,
0.12628787755966187,
0.0993308573961258,
-0.06295701861381531,
0.11866123974323273,
-0.04646492376923561,
0.07894334197044373,
0.10736645013093948,
-0.269997239112854,
-0.03241410106420517,
0.025699879974126816,
0.06624636799097061,
0.01477538701146841,
-0.10575943440198898,
0.01253841258585453,
-0.0231656264513731,
0.035225044935941696,
0.06219533085823059,
-0.0458412766456604,
0.05736738070845604,
-0.0011869523441419005,
-0.10823968797922134,
0.003670371836051345,
0.1713632494211197,
0.02891739085316658,
-0.017436839640140533,
-0.1917790025472641,
-0.07008298486471176,
0.07320773601531982,
-0.05118046700954437,
-0.03254622593522072,
0.02589084394276142,
0.051183924078941345,
-0.023915359750390053,
-0.09942103177309036,
-0.10255321860313416,
-0.011471838690340519,
-0.06592676788568497,
0.013784154318273067,
-0.011710849590599537,
-0.05457070842385292,
0.009627042338252068,
0.06532344967126846,
-0.08246553689241409,
-0.11191780865192413,
-0.019392646849155426,
-0.03116486966609955,
-0.11998496949672699,
-0.030687406659126282,
-0.06220649927854538,
-0.09274743497371674,
0.04140012711286545,
0.15037254989147186,
0.07718838006258011,
0.009818959049880505,
-0.0149738360196352,
0.056043416261672974,
0.019878465682268143,
0.17800381779670715,
-0.05327771231532097,
-0.08179811388254166,
-0.04324512928724289,
0.029377851635217667,
-0.0012223189696669579,
-0.02033895254135132,
-0.04203476384282112,
-0.01170405838638544,
0.03577885404229164,
0.06280221045017242,
0.06551651656627655,
0.058001186698675156,
-0.050668686628341675,
-0.039709560573101044,
0.05032111331820488,
-0.11680866032838821,
0.02766742743551731,
0.004355476703494787,
-0.04930170997977257,
0.015368019230663776,
0.08475197851657867,
-0.0065364232286810875,
-0.07184801250696182,
0.01184586901217699,
-0.10645059496164322,
-0.014788641594350338,
-0.057142361998558044,
-0.13022162020206451,
-0.009537314996123314,
-0.024123070761561394,
-0.0333106704056263,
-0.09442999213933945,
-0.13422738015651703,
-0.07926768064498901,
0.02616989240050316,
-0.04583749547600746,
-0.01561452355235815,
-0.12424450367689133,
-0.013364319689571857,
0.006654217839241028,
-0.00018996853032149374,
-0.0574704147875309,
-0.0036366390995681286,
0.017353929579257965,
-0.054291702806949615,
0.05573214218020439,
0.0528961606323719,
0.04094618931412697,
-0.12337721139192581,
0.025863487273454666,
-0.1313147246837616,
0.16133250296115875,
-0.03605905547738075,
0.07053712755441666,
-0.1402013599872589,
0.02678797021508217,
0.013730639591813087,
0.06502480059862137,
0.0060552917420864105,
0.1435338407754898,
-0.21270933747291565,
-0.07860797643661499,
0.13334974646568298,
-0.04714732989668846,
-0.08307458460330963,
0.10101494193077087,
-0.03703150153160095,
0.13734400272369385,
0.11440586298704147,
0.12802347540855408,
0.11212317645549774,
-0.049559034407138824,
-0.007195219397544861,
0.018517697229981422,
-0.06103817746043205,
0.13767565786838531,
0.037285998463630676,
-0.07360539585351944,
0.0988502949476242,
-0.003161473199725151,
-0.0445079542696476,
0.004875749349594116,
-0.002825446892529726,
-0.05799573287367821,
0.016362624242901802,
-0.035004545003175735,
0.049706265330314636,
-0.03302471712231636,
-0.0015520000597462058,
0.009066496975719929,
-0.11166267096996307,
0.09602323919534683,
0.07281063497066498,
-0.07389812171459198,
0.020369188860058784,
-0.0837629958987236,
0.016280516982078552,
-0.0036994649562984705,
0.013989663682878017,
-0.20689906179904938,
-0.11571627855300903,
0.017567355185747147,
0.021027661859989166,
0.1173362284898758,
0.04874071478843689,
0.05825892835855484,
0.030771490186452866,
-0.017735470086336136,
-0.015650751069188118,
0.0447256937623024,
-0.001904110424220562,
-0.09268458187580109,
-0.1250937581062317,
0.002578440122306347,
-0.03673259913921356,
0.09595411270856857,
-0.11164644360542297,
0.020848337560892105,
0.009553966112434864,
0.08311969041824341,
0.042382482439279556,
-0.02216881513595581,
-0.005471323616802692,
-0.02571798302233219,
-0.008146323263645172,
-0.02945712022483349,
0.05437196046113968,
0.014292536303400993,
-0.14565645158290863,
0.10796474665403366,
-0.2073436826467514,
-0.15553024411201477,
0.0763130784034729,
0.003504855092614889,
-0.0651644766330719,
-0.05718996375799179,
-0.0233316570520401,
0.0001295542169827968,
-0.019979223608970642,
-0.0674210712313652,
0.2008524090051651,
0.08123768121004105,
0.10818367451429367,
-0.036542344838380814,
-0.028401978313922882,
-0.052324917167425156,
-0.03744466230273247,
-0.04919978231191635,
0.11220470070838928,
-0.03900644928216934,
-0.15351460874080658,
0.05353103205561638,
0.06801722943782806,
-0.05220431089401245,
0.11273985356092453,
-0.010670067742466927,
-0.07174741476774216,
-0.06247364729642868,
0.0187373049557209,
0.038876403123140335,
-0.0032025284599512815,
-0.074161596596241,
0.009547647088766098,
0.058616846799850464,
0.01427088025957346,
0.025286467745900154,
-0.06135500594973564,
0.045069340616464615,
0.05373166501522064,
0.006706417538225651,
0.10717957466840744,
0.03171336650848389,
0.003692097030580044,
0.0660131424665451,
0.014414462260901928,
0.012042444199323654,
-0.05152194947004318,
-0.044491246342659,
-0.10792429000139236,
0.16571900248527527,
-0.13327999413013458,
-0.2034437358379364,
-0.15746232867240906,
0.005707697477191687,
-0.038840629160404205,
0.024653825908899307,
0.08053823560476303,
-0.06097958981990814,
-0.06506738066673279,
-0.06500888615846634,
0.07957983762025833,
0.07566782087087631,
-0.045618217438459396,
-0.0028063759673386812,
0.042178090661764145,
0.01336628943681717,
-0.1225142553448677,
-0.012181290425360203,
-0.004411411006003618,
-0.0756969004869461,
-0.005756382364779711,
-0.03292682766914368,
0.06460127979516983,
0.120637908577919,
0.06752514839172363,
-0.012703263200819492,
-0.011370876803994179,
0.22648748755455017,
-0.09608131647109985,
0.05563368275761604,
0.16165435314178467,
-0.003110503312200308,
0.06499084830284119,
0.09949181973934174,
0.013068292289972305,
-0.05406537279486656,
0.047368600964546204,
0.07309726625680923,
-0.014207140542566776,
-0.13878528773784637,
-0.10741285234689713,
-0.06364010274410248,
-0.0044515361078083515,
0.12093092501163483,
0.03613985329866409,
0.039057858288288116,
0.04235582798719406,
-0.024900108575820923,
-0.0013002888299524784,
0.09911085665225983,
0.11600464582443237,
0.12641990184783936,
-0.025360506027936935,
0.10476347804069519,
-0.04694531112909317,
-0.07614348083734512,
0.05479336529970169,
-0.005802270025014877,
0.16133110225200653,
0.026576027274131775,
0.15926600992679596,
0.07925381511449814,
-0.03836412355303764,
-0.01946810446679592,
0.0717211663722992,
-0.03505934774875641,
0.015713319182395935,
-0.0360034815967083,
-0.09206940978765488,
-0.0029396461322903633,
0.08385860174894333,
0.10049372166395187,
-0.03670975938439369,
-0.0379020981490612,
0.060102492570877075,
0.13047431409358978,
0.13241754472255707,
0.0885901153087616,
-0.22941699624061584,
-0.04247651249170303,
0.03839680552482605,
-0.07670080661773682,
-0.06279584765434265,
-0.011715288273990154,
0.03170096501708031,
-0.10959689319133759,
0.03588270768523216,
-0.008626231923699379,
0.09881812334060669,
-0.08547564595937729,
0.030363183468580246,
-0.0656084194779396,
0.039810843765735626,
-0.00021726808336097747,
0.0704852044582367,
-0.2112669199705124,
0.10367234796285629,
0.037770505994558334,
0.051816049963235855,
-0.05002710223197937,
0.01934990845620632,
0.06869485229253769,
0.014550953172147274,
0.17688454687595367,
-0.033370811492204666,
-0.018862344324588776,
-0.004084692802280188,
-0.06851813197135925,
-0.005538702942430973,
0.054498087614774704,
-0.12654808163642883,
0.09593982249498367,
-0.055114831775426865,
-0.02996821328997612,
-0.010935474187135696,
0.034946419298648834,
-0.040724948048591614,
-0.1749870777130127,
0.010542241856455803,
0.011620145291090012,
-0.006275281775742769,
-0.02252514474093914,
-0.003692936385050416,
0.020731402561068535,
0.19649213552474976,
-0.10534737259149551,
-0.05856410041451454,
-0.1243225485086441,
-0.014372047036886215,
0.12210773676633835,
-0.08850552886724472,
0.006669180002063513,
-0.006555662024766207,
0.14670330286026,
-0.06061050295829773,
-0.06837151199579239,
0.06526247411966324,
-0.046882521361112595,
-0.06293871253728867,
-0.04493047669529915,
0.10931482911109924,
0.0552188977599144,
0.055688049644231796,
0.04567651450634003,
0.08085159212350845,
-0.03521043062210083,
-0.08567013591527939,
-0.06173454597592354,
0.09654619544744492,
0.0020745862275362015,
0.06486126035451889,
-0.12957678735256195,
-0.035636257380247116,
-0.11656413972377777,
0.04843151196837425,
0.19636061787605286,
0.22469067573547363,
-0.07373035699129105,
0.06835223734378815,
0.1363377869129181,
-0.10232851654291153,
-0.2230861634016037,
-0.07620285451412201,
0.023814519867300987,
0.043698493391275406,
0.06347502022981644,
-0.13284741342067719,
0.059209778904914856,
0.0488106831908226,
-0.005016551353037357,
-0.07363822311162949,
-0.24011069536209106,
-0.14370191097259521,
0.13327506184577942,
0.001901068608276546,
-0.04044291377067566,
-0.09699929505586624,
-0.06004589796066284,
-0.06922333687543869,
-0.03182695433497429,
0.0879138931632042,
-0.061245597898960114,
0.11008261144161224,
0.04754479601979256,
0.03219687193632126,
0.06020698696374893,
0.007440952118486166,
0.14137059450149536,
0.06518831849098206,
0.039430610835552216,
-0.038148608058691025,
-0.03564896062016487,
0.07445981353521347,
-0.08001647144556046,
0.1371648609638214,
-0.04941481724381447,
0.03588437661528587,
-0.12055818736553192,
-0.031805992126464844,
-0.04485229402780533,
0.027467800304293633,
-0.04944740608334541,
-0.05134357139468193,
-0.020830992609262466,
0.054306283593177795,
0.09124036878347397,
-0.008844119496643543,
0.0182977132499218,
-0.09108281135559082,
0.04371534660458565,
0.13919323682785034,
0.11788618564605713,
0.06876705586910248,
-0.15233920514583588,
0.026381513103842735,
0.005242491140961647,
0.05066239833831787,
-0.1210891529917717,
0.06811993569135666,
0.07587135583162308,
-0.011791677214205265,
0.15278685092926025,
0.016850190237164497,
-0.0929308831691742,
-0.013963492587208748,
0.05265194550156593,
-0.07923588901758194,
-0.1545131653547287,
-0.04037678241729736,
-0.022276077419519424,
-0.10520943254232407,
-0.05849398300051689,
0.15634390711784363,
-0.005491473712027073,
-0.0005708429380320013,
0.0405692495405674,
0.0417235791683197,
-0.032413363456726074,
0.10749359428882599,
-0.010583104565739632,
0.051584988832473755,
-0.04846430569887161,
0.0912439227104187,
0.08696997165679932,
-0.08651930093765259,
0.032561201602220535,
0.12202101945877075,
-0.06945683807134628,
-0.08966252952814102,
-0.06506983935832977,
0.13735580444335938,
-0.08803759515285492,
0.03093309886753559,
-0.059121184051036835,
-0.065097875893116,
0.021216409280896187,
0.05397486314177513,
0.049339305609464645,
0.060575589537620544,
-0.08501540869474411,
-0.01601588726043701,
-0.09234602749347687,
0.07924920320510864,
0.08749502152204514,
0.00971205998212099,
-0.019773932173848152,
0.08220212161540985,
-0.025396350771188736,
0.005030397325754166,
-0.02723083458840847,
-0.04143249988555908,
-0.07419086247682571,
-0.006782440468668938,
-0.030437877401709557,
-0.019925221800804138,
-0.09831786155700684,
-0.013338441960513592,
0.031727951020002365,
0.03668586164712906,
-0.006290100980550051,
-0.014939178712666035,
-0.04745171591639519,
-0.07267170399427414,
-0.0500858910381794,
0.09888546913862228,
-0.1378554105758667,
-0.015496950596570969,
0.04583394154906273,
-0.09608965367078781,
0.08268333226442337,
-0.009487454779446125,
-0.022636689245700836,
0.03656129911541939,
-0.027373945340514183,
-0.04115421697497368,
0.02356831729412079,
0.028800446540117264,
0.05929439514875412,
-0.10743522644042969,
0.01198919303715229,
-0.059813108295202255,
0.028522394597530365,
0.011038019321858883,
0.03814849629998207,
-0.09665939956903458,
0.016667349264025688,
-0.014286404475569725,
-0.02626042254269123,
-0.10157519578933716,
0.027772581204771996,
0.03276888653635979,
0.04045950248837471,
0.15705101191997528,
-0.053240031003952026,
0.07601803541183472,
-0.13198047876358032,
0.010592712089419365,
0.0029607207980006933,
-0.05448903143405914,
0.11417533457279205,
-0.11348579823970795,
0.05471043288707733,
-0.04318876564502716,
0.06002609804272652,
-0.01949279196560383,
0.05434749647974968,
0.06775568425655365,
0.044503193348646164,
-0.005995375569909811,
0.027690168470144272,
0.054394930601119995,
0.0538969486951828,
-0.00631148973479867,
-0.05961480364203453,
0.03127436339855194,
0.010657550767064095,
-0.027259264141321182,
0.04475976899266243,
0.08978897333145142,
0.05595970153808594,
0.0887056365609169,
0.08387786895036697,
-0.0074761095456779,
-0.07864625006914139,
0.036882802844047546,
-0.02361091412603855,
0.05073186755180359,
-0.03805232420563698,
0.05326363071799278,
0.16773299872875214,
-0.15948523581027985,
0.12309076637029648,
0.00849172379821539,
-0.062357157468795776,
-0.08082811534404755,
-0.13281510770320892,
-0.07132362574338913,
-0.024679820984601974,
-0.014722433872520924,
-0.12280426174402237,
-0.01123859267681837,
-0.012683205306529999,
0.003703932510688901,
-0.006716626230627298,
0.12370810657739639,
-0.10890280455350876,
-0.09874507039785385,
0.09019137173891068,
-0.01880417950451374,
0.04966873675584793,
0.010185481980443,
0.03236091509461403,
0.01872708462178707,
0.07704439014196396,
0.04385654628276825,
0.04542868956923485,
0.03999149426817894,
0.0282784104347229,
-0.09308110922574997,
-0.08259651809930801,
0.00003175625897711143,
-0.007969189435243607,
-0.05081886425614357,
0.07328769564628601,
0.03440699726343155,
-0.07689647376537323,
-0.010383576154708862,
0.24282532930374146,
-0.09094975143671036,
-0.11270897835493088,
-0.1822509467601776,
0.18390528857707977,
0.03565957024693489,
0.03241999074816704,
-0.027059068903326988,
-0.0941522866487503,
-0.012778247706592083,
0.1501079648733139,
0.17370766401290894,
-0.0833977609872818,
0.01850043050944805,
0.0418817363679409,
0.019790448248386383,
0.00840934831649065,
0.009964631870388985,
0.05654029920697212,
0.19141490757465363,
-0.052622415125370026,
0.08787289261817932,
-0.017249759286642075,
-0.05773082375526428,
-0.06888483464717865,
0.09810038655996323,
0.0285257026553154,
0.03103446587920189,
-0.014092124998569489,
0.11345504969358444,
-0.04252712428569794,
-0.09335285425186157,
-0.03602711483836174,
-0.09066198766231537,
-0.11966404318809509,
-0.03311088681221008,
0.026598870754241943,
0.019711999222636223,
0.10013150423765182,
0.03474711254239082,
-0.034059494733810425,
0.11926938593387604,
-0.009771537035703659,
-0.05591856315732002,
-0.02189428173005581,
0.027975739911198616,
-0.025615263730287552,
0.15336020290851593,
-0.009627597406506538,
-0.025813033804297447,
0.12744635343551636,
0.004824611358344555,
-0.05775099992752075,
0.07827726006507874,
0.046076562255620956,
-0.06428925693035126,
0.12203046679496765,
0.07976655662059784,
-0.015771348029375076,
0.07693219184875488,
0.08029903471469879,
-0.1910112202167511,
0.06685574352741241,
-0.05002622306346893,
-0.03621833771467209,
-0.05918298289179802,
0.06049426272511482,
-0.08337859064340591,
0.12383998930454254,
0.17442439496517181,
-0.020788559690117836,
-0.006175722926855087,
-0.009907952509820461,
-0.008784717880189419,
0.03204631805419922,
0.062360431998968124,
-0.05466374382376671,
-0.08510229736566544,
0.001004120334982872,
0.007488601841032505,
0.012829550541937351,
-0.28875595331192017,
-0.11584672331809998,
0.022373203188180923,
-0.02157602459192276,
-0.02570604905486107,
0.13153496384620667,
0.08289404213428497,
-0.00448341341689229,
-0.03321704640984535,
-0.20281833410263062,
0.03656034171581268,
0.10497485101222992,
-0.12470410764217377,
-0.08041355758905411
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-roberta-hinglish-big
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-roberta-hinglish-big')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-roberta-hinglish-big')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-roberta-hinglish-big')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
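Continuing the snippet above, a common next step is to score the two pooled embeddings against each other. The sketch below is an illustrative addition (not part of the original card): it L2-normalises the embeddings so that a dot product yields cosine similarity.
```python
import torch.nn.functional as F

# L2-normalise, then a dot product gives cosine similarity in [-1, 1]
normalized = F.normalize(sentence_embeddings, p=2, dim=1)
similarity_matrix = normalized @ normalized.T
print("Cosine similarity between the two sentences:", similarity_matrix[0, 1].item())
```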
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-roberta-hinglish-big)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit() method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-roberta-hinglish-big
|
[
"sentence-transformers",
"pytorch",
"roberta",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-roberta-hinglish-big
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-roberta-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-roberta-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
43,
62,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-roberta-hinglish-big\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.039217960089445114,
0.09403430670499802,
-0.0064859832637012005,
0.045874472707509995,
0.10667537897825241,
0.02373676933348179,
0.1412488967180252,
0.08640608936548233,
-0.0021962535101920366,
0.09332776814699173,
0.024829229339957237,
0.08388838171958923,
0.010860798880457878,
0.025801576673984528,
0.030476247891783714,
-0.27844807505607605,
0.014981779269874096,
-0.04693959280848503,
0.027277277782559395,
0.07505036890506744,
0.11317280679941177,
-0.081280916929245,
0.06694966554641724,
0.027011407539248466,
-0.04648422449827194,
0.002924007596448064,
-0.032302629202604294,
-0.024286454543471336,
0.08254438638687134,
0.063386932015419,
0.04393821209669113,
0.010656074620783329,
0.017924977466464043,
-0.1918095350265503,
0.018534235656261444,
0.059582918882369995,
-0.00986593309789896,
0.06805335730314255,
0.03544158115983009,
-0.037224989384412766,
0.1790006309747696,
-0.0742170512676239,
0.05317554250359535,
0.06070752069354057,
-0.11326064169406891,
-0.06086742505431175,
-0.055079177021980286,
-0.018927309662103653,
0.12840192019939423,
0.10616070032119751,
-0.06240815669298172,
0.10715891420841217,
-0.06844514608383179,
0.0714673176407814,
0.09815075993537903,
-0.2681254744529724,
-0.03052963688969612,
0.03816884383559227,
0.062452830374240875,
0.027868002653121948,
-0.10356146097183228,
0.004376917611807585,
-0.018611140549182892,
0.028952578082680702,
0.06864210218191147,
-0.046194788068532944,
0.07202114909887314,
-0.007898748852312565,
-0.10972879081964493,
0.004293830133974552,
0.16139332950115204,
0.0456581711769104,
-0.02122337929904461,
-0.1930176019668579,
-0.06012736260890961,
0.037692926824092865,
-0.06355199217796326,
-0.03913147747516632,
0.03187844157218933,
0.039020925760269165,
-0.007436410523951054,
-0.09753450751304626,
-0.116266168653965,
-0.014674989506602287,
-0.0817502811551094,
0.00252302223816514,
-0.021659396588802338,
-0.049516431987285614,
-0.005799273028969765,
0.05324558541178703,
-0.10261514782905579,
-0.11612315475940704,
-0.039924927055835724,
-0.019100425764918327,
-0.12384270876646042,
-0.04420701041817665,
-0.054585050791502,
-0.09846191108226776,
0.04455742985010147,
0.1393902748823166,
0.07619873434305191,
0.003063678042963147,
-0.011251389980316162,
0.061862051486968994,
0.02205621637403965,
0.1850237399339676,
-0.06391721963882446,
-0.07947570085525513,
-0.03596187010407448,
0.027880162000656128,
0.004801830276846886,
-0.013437359593808651,
-0.049239154905080795,
-0.009531533345580101,
0.007212802767753601,
0.060616347938776016,
0.05054226145148277,
0.04876831918954849,
-0.05403350293636322,
-0.03330696001648903,
0.06774741411209106,
-0.10896456241607666,
0.029394075274467468,
0.015087184496223927,
-0.057602658867836,
0.02897082082927227,
0.06700681149959564,
-0.009441972710192204,
-0.06468803435564041,
0.026144374161958694,
-0.10704363137483597,
-0.006952900905162096,
-0.06042472645640373,
-0.13046112656593323,
-0.003726526629179716,
0.001400141161866486,
-0.026925066486001015,
-0.09982021152973175,
-0.13041304051876068,
-0.0843118205666542,
0.017620285972952843,
-0.05300522968173027,
-0.00619636382907629,
-0.12612055242061615,
-0.008105400018393993,
0.009421060793101788,
-0.006560234352946281,
-0.04844523221254349,
-0.012160501442849636,
0.00946144387125969,
-0.04843275621533394,
0.059615764766931534,
0.046190064400434494,
0.04233207553625107,
-0.1259738951921463,
0.02154429256916046,
-0.13016822934150696,
0.1550859659910202,
-0.02337542362511158,
0.08416973799467087,
-0.15285702049732208,
0.009048392996191978,
0.0008894439088180661,
0.06422514468431473,
0.00883685052394867,
0.1458386331796646,
-0.20165209472179413,
-0.07227146625518799,
0.1318047195672989,
-0.03762602061033249,
-0.10482759773731232,
0.118828184902668,
-0.024380095303058624,
0.13509152829647064,
0.10502460598945618,
0.10909292101860046,
0.12814223766326904,
-0.040004245936870575,
-0.008668594062328339,
0.030702292919158936,
-0.06177567318081856,
0.11364245414733887,
0.04790644720196724,
-0.06046895310282707,
0.09939870983362198,
-0.00932918582111597,
-0.041861992329359055,
0.006910702213644981,
-0.000147620914503932,
-0.06347917765378952,
0.023749856278300285,
-0.035020072013139725,
0.032674819231033325,
-0.036768533289432526,
0.00513700395822525,
0.012841175310313702,
-0.10270751267671585,
0.06273329257965088,
0.08188294619321823,
-0.06188975274562836,
0.01155683770775795,
-0.09344621747732162,
0.019547224044799805,
-0.005765797104686499,
0.020388685166835785,
-0.19514676928520203,
-0.09822236001491547,
0.010897312313318253,
0.026591887697577477,
0.12019074708223343,
0.06791923940181732,
0.060762591660022736,
0.030501263216137886,
-0.0028025959618389606,
-0.014922802336513996,
0.05340518057346344,
-0.028229065239429474,
-0.09972243010997772,
-0.10891475528478622,
0.007040748372673988,
-0.03194814175367355,
0.09619761258363724,
-0.12070760875940323,
0.011636504903435707,
0.020685506984591484,
0.03598529100418091,
0.04926265403628349,
-0.018701648339629173,
0.002888962859287858,
-0.023249050602316856,
-0.01324831135571003,
-0.022208940237760544,
0.047822799533605576,
0.006954401731491089,
-0.1543581336736679,
0.09650149196386337,
-0.21330416202545166,
-0.12535005807876587,
0.07559037208557129,
0.01425309106707573,
-0.06038108468055725,
-0.0760326087474823,
-0.02003668062388897,
0.005047143902629614,
-0.02142542600631714,
-0.05577101185917854,
0.196050226688385,
0.08893881738185883,
0.10966534167528152,
-0.034833043813705444,
-0.018038569018244743,
-0.047112468630075455,
-0.03311474248766899,
-0.034904845058918,
0.11574734002351761,
-0.02947714738547802,
-0.1407308578491211,
0.04166021570563316,
0.07704481482505798,
-0.06921809911727905,
0.10528678447008133,
-0.0019533378072082996,
-0.07798592001199722,
-0.060654591768980026,
0.03795132413506508,
0.046344079077243805,
-0.015204062685370445,
-0.07353199273347855,
0.014690854586660862,
0.06389389932155609,
0.00966240931302309,
0.014146481640636921,
-0.056685224175453186,
0.0457870289683342,
0.05847090482711792,
-0.0004058766644448042,
0.09527432173490524,
0.030623165890574455,
-0.0034967337269335985,
0.06491585075855255,
0.025291474536061287,
-0.002356556709855795,
-0.05814758315682411,
-0.04548092558979988,
-0.10740317404270172,
0.17699679732322693,
-0.10958952456712723,
-0.20180994272232056,
-0.16365055739879608,
0.002839147113263607,
-0.042696282267570496,
0.029698338359594345,
0.0770106315612793,
-0.06322511285543442,
-0.07247170060873032,
-0.07723681628704071,
0.08262673020362854,
0.08245523273944855,
-0.035437196493148804,
-0.016305744647979736,
0.04391516372561455,
0.00835780706256628,
-0.12042318284511566,
-0.013309603556990623,
-0.00759406853467226,
-0.0808844268321991,
-0.001954828854650259,
-0.017927246168255806,
0.06123422831296921,
0.11937333643436432,
0.06011326238512993,
-0.013569456525146961,
-0.012338470667600632,
0.20957860350608826,
-0.10098090022802353,
0.045206617563962936,
0.18889334797859192,
0.01732601411640644,
0.05727093666791916,
0.10186159610748291,
0.028055353090167046,
-0.056067537516355515,
0.05053463950753212,
0.057519104331731796,
-0.022662490606307983,
-0.13557416200637817,
-0.10987181216478348,
-0.07462877035140991,
0.011835390701889992,
0.13253994286060333,
0.03377661108970642,
0.010865786112844944,
0.061711836606264114,
-0.02651563659310341,
0.010064282454550266,
0.08275789022445679,
0.10616660118103027,
0.10728077590465546,
-0.014863962307572365,
0.10568737983703613,
-0.04825306311249733,
-0.07455689460039139,
0.0632529929280281,
-0.007300218101590872,
0.13952159881591797,
0.018733734264969826,
0.19809392094612122,
0.06912226974964142,
-0.034158699214458466,
-0.019876183941960335,
0.08213604241609573,
-0.03526991978287697,
-0.006684232968837023,
-0.029699066653847694,
-0.09670083224773407,
-0.008506720885634422,
0.09903798252344131,
0.09419634193181992,
-0.030361315235495567,
-0.03888501599431038,
0.07031907141208649,
0.11786573380231857,
0.12979263067245483,
0.09103109687566757,
-0.2574978470802307,
-0.04782288894057274,
0.0316895954310894,
-0.06785129755735397,
-0.059188101440668106,
-0.00759884063154459,
0.039316918700933456,
-0.10624037683010101,
0.031485024839639664,
-0.0021094775293022394,
0.10223580151796341,
-0.08558578789234161,
0.03177005052566528,
-0.07797347754240036,
0.05613597854971886,
-0.0023680871818214655,
0.07784800976514816,
-0.22755631804466248,
0.09701960533857346,
0.03966137394309044,
0.04812061786651611,
-0.06126800552010536,
0.02085929550230503,
0.07236974686384201,
0.01067445334047079,
0.17490889132022858,
-0.03653474524617195,
0.007019511889666319,
-0.022196168079972267,
-0.07610125094652176,
-0.001707567716948688,
0.04983755201101303,
-0.12828955054283142,
0.09461060911417007,
-0.046743713319301605,
-0.028516860678792,
-0.023327002301812172,
0.026309600099921227,
-0.023560181260108948,
-0.16737185418605804,
0.004482225980609655,
0.029409069567918777,
0.006307083182036877,
-0.014603652060031891,
0.002143110381439328,
0.013808304443955421,
0.20420190691947937,
-0.1083284541964531,
-0.0510549396276474,
-0.12653817236423492,
0.00880185142159462,
0.10501348972320557,
-0.09875091910362244,
0.0007312067318707705,
-0.003536319825798273,
0.15602895617485046,
-0.05269613116979599,
-0.0562547892332077,
0.07111809402704239,
-0.055443618446588516,
-0.0574704073369503,
-0.03976607695221901,
0.10234001278877258,
0.05295558273792267,
0.06699348241090775,
0.04518258571624756,
0.06393483281135559,
-0.052973050624132156,
-0.09527237713336945,
-0.07729081064462662,
0.09104901552200317,
-0.00013846781803295016,
0.07239306718111038,
-0.10300788283348083,
-0.02534814178943634,
-0.09969937056303024,
0.048391193151474,
0.20929314196109772,
0.21783700585365295,
-0.06866183876991272,
0.06227369233965874,
0.1265047937631607,
-0.07724147289991379,
-0.2359348088502884,
-0.06502454727888107,
0.03976735472679138,
0.06332292407751083,
0.07382518798112869,
-0.12519995868206024,
0.0727691799402237,
0.06296035647392273,
-0.010755475610494614,
-0.059228263795375824,
-0.24450050294399261,
-0.14651040732860565,
0.1322304755449295,
0.0032214676029980183,
-0.03669489920139313,
-0.08761759847402573,
-0.05251564458012581,
-0.07955735921859741,
-0.008248468860983849,
0.07409416884183884,
-0.054233942180871964,
0.10564223676919937,
0.04133607819676399,
0.047489654272794724,
0.06585564464330673,
0.005648559425026178,
0.13559366762638092,
0.05991879850625992,
0.03943776339292526,
-0.04283959046006203,
-0.006226960103958845,
0.07250223308801651,
-0.08808564394712448,
0.16520866751670837,
-0.084873266518116,
0.027351822704076767,
-0.11997127532958984,
-0.033388614654541016,
-0.03468530997633934,
0.022499442100524902,
-0.037161484360694885,
-0.050091154873371124,
-0.03138510137796402,
0.04149523377418518,
0.078473299741745,
-0.0020041679963469505,
0.025338899344205856,
-0.09165602922439575,
0.01955227367579937,
0.1475752592086792,
0.15074914693832397,
0.037074629217386246,
-0.1590128391981125,
0.03264693543314934,
0.014012272469699383,
0.05858408287167549,
-0.13893096148967743,
0.0684734359383583,
0.07923407852649689,
-0.0143125643953681,
0.14324907958507538,
0.018461184576153755,
-0.07913703471422195,
0.0024940401781350374,
0.06669976562261581,
-0.05253927782177925,
-0.17993979156017303,
-0.039644766598939896,
0.002384740859270096,
-0.135774165391922,
-0.05637138709425926,
0.15546192228794098,
-0.008922683075070381,
0.0089934216812253,
0.045674972236156464,
0.042817071080207825,
-0.036256469786167145,
0.12033829092979431,
-0.021795352920889854,
0.06003944203257561,
-0.06064976751804352,
0.057413890957832336,
0.09070276468992233,
-0.08974040299654007,
0.022833218798041344,
0.12804734706878662,
-0.06727397441864014,
-0.10474633425474167,
-0.05917276814579964,
0.1087070181965828,
-0.09413784742355347,
0.035574283450841904,
-0.05028285086154938,
-0.07239478826522827,
0.019209587946534157,
0.01914530247449875,
0.050049420446157455,
0.05684370920062065,
-0.08542684465646744,
-0.0269046351313591,
-0.09578821808099747,
0.07411716133356094,
0.06551460921764374,
0.013685126788914204,
-0.020572375506162643,
0.07673969864845276,
-0.03516031429171562,
0.011240503750741482,
-0.025654302909970284,
-0.039363034069538116,
-0.06420119106769562,
-0.003386249067261815,
-0.035490963608026505,
-0.012443842366337776,
-0.10406877845525742,
-0.006818349938839674,
0.03036915883421898,
0.0486321747303009,
-0.015685373917222023,
-0.01766606792807579,
-0.054360996931791306,
-0.07093727588653564,
-0.057981111109256744,
0.1077236458659172,
-0.12799689173698425,
-0.0021118740551173687,
0.03384397178888321,
-0.0855078399181366,
0.09306304901838303,
-0.022403739392757416,
-0.019197961315512657,
0.01761193573474884,
-0.03151377663016319,
-0.04013543948531151,
0.03134841471910477,
0.03704249486327171,
0.06612749397754669,
-0.0930216982960701,
0.014552625827491283,
-0.05119277536869049,
0.019841577857732773,
0.010444225743412971,
0.033602695912122726,
-0.09614097326993942,
0.02923872321844101,
-0.030821871012449265,
-0.006517795845866203,
-0.1099822074174881,
0.031390849500894547,
0.009604931809008121,
0.03638991340994835,
0.1620263010263443,
-0.04647860303521156,
0.07162600010633469,
-0.13464315235614777,
0.012428385205566883,
0.00370935071259737,
-0.04483010619878769,
0.08052509278059006,
-0.12539803981781006,
0.06333263218402863,
-0.039639849215745926,
0.050833866000175476,
-0.008366398513317108,
0.05719636380672455,
0.06209297850728035,
0.05502903461456299,
0.022999636828899384,
0.015516750514507294,
0.04948723688721657,
0.06335515528917313,
-0.013727443292737007,
-0.049427617341279984,
0.03609905764460564,
0.021694684401154518,
-0.014747687615454197,
0.05836649239063263,
0.0731833353638649,
0.033900849521160126,
0.09457087516784668,
0.07573369890451431,
-0.009651142172515392,
-0.08366633951663971,
0.028435662388801575,
-0.043039191514253616,
0.05299604684114456,
-0.030937906354665756,
0.05346003174781799,
0.1807321459054947,
-0.15017668902873993,
0.1068677082657814,
0.012174859642982483,
-0.054933030158281326,
-0.0762598067522049,
-0.1474275439977646,
-0.07853484898805618,
-0.045030612498521805,
-0.013749973848462105,
-0.12311951071023941,
-0.014876963570713997,
-0.011082201264798641,
0.014056127518415451,
-0.008512411266565323,
0.13225731253623962,
-0.11115989089012146,
-0.10197564959526062,
0.08766055107116699,
-0.025694776326417923,
0.05208463594317436,
0.01783076860010624,
0.029768124222755432,
0.023453980684280396,
0.051866572350263596,
0.05861865356564522,
0.04989597946405411,
0.04828432574868202,
0.03331783413887024,
-0.0958300232887268,
-0.08812110126018524,
-0.010187684558331966,
0.00009337512165075168,
-0.03945942595601082,
0.07369325309991837,
0.03251640126109123,
-0.07815966010093689,
-0.014721140265464783,
0.21993131935596466,
-0.08528722077608109,
-0.08893738687038422,
-0.16951878368854523,
0.1973051130771637,
0.045472029596567154,
0.03463635593652725,
-0.04621422290802002,
-0.08386572450399399,
-0.012658197432756424,
0.15382398664951324,
0.1761956512928009,
-0.09363824129104614,
0.015757350251078606,
0.028634874150156975,
0.017383579164743423,
-0.010937380604445934,
0.023826122283935547,
0.05435224995017052,
0.21250919997692108,
-0.05271889269351959,
0.09672259539365768,
-0.0022332423832267523,
-0.05544644221663475,
-0.06171854957938194,
0.08637820929288864,
0.03104497119784355,
0.03922673314809799,
-0.0053634666837751865,
0.11430481821298599,
-0.04152199625968933,
-0.05976050719618797,
-0.03013421781361103,
-0.08282595127820969,
-0.12078791111707687,
-0.03237923979759216,
0.013005695305764675,
0.03339572623372078,
0.10773000866174698,
0.02326005883514881,
-0.04063168913125992,
0.0887133777141571,
-0.02234259806573391,
-0.06862404942512512,
-0.026887957006692886,
0.015992041677236557,
-0.017822932451963425,
0.1534835249185562,
-0.007477119565010071,
-0.030004393309354782,
0.12953458726406097,
0.004027162212878466,
-0.07193037867546082,
0.08058111369609833,
0.04136626422405243,
-0.06822256743907928,
0.14277976751327515,
0.0769982859492302,
-0.018360214307904243,
0.07446105033159256,
0.08819881826639175,
-0.19730345904827118,
0.05566268041729927,
-0.06545253843069077,
-0.01941964589059353,
-0.06574899703264236,
0.04887307435274124,
-0.07040002942085266,
0.12443669885396957,
0.17421232163906097,
-0.02321460284292698,
-0.014142255298793316,
-0.0019331693183630705,
0.008186065591871738,
0.02980327606201172,
0.07013595104217529,
-0.0521119125187397,
-0.08892057836055756,
0.0024357677903026342,
0.0028262261766940355,
0.0041606673039495945,
-0.29918527603149414,
-0.10511387139558792,
0.01438164059072733,
-0.007629558444023132,
-0.024966737255454063,
0.12445095926523209,
0.0832984447479248,
-0.005720366258174181,
-0.031601790338754654,
-0.22230729460716248,
0.035326287150382996,
0.09971972554922104,
-0.12627968192100525,
-0.09310225397348404
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-roberta-hinglish-small
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-roberta-hinglish-small')
embeddings = model.encode(sentences)
print(embeddings)
```
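The embeddings returned above can be compared directly. The short sketch below (illustrative, not from the original card) continues that snippet and scores the two example sentences with the cosine-similarity helper shipped with sentence-transformers.
```python
from sentence_transformers import util

# Pairwise cosine similarities between all encoded sentences
scores = util.cos_sim(embeddings, embeddings)
print(scores[0][1])  # similarity between the first and second example sentence
```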
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: first, pass your input through the transformer model, then apply the right pooling operation on top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-roberta-hinglish-small')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-roberta-hinglish-small')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-roberta-hinglish-small)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 4617 with parameters:
```
{'batch_size': 32, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit() method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 128, 'do_lower_case': False}) with Transformer model: RobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-roberta-hinglish-small
|
[
"sentence-transformers",
"pytorch",
"roberta",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-roberta-hinglish-small
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 4617 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-roberta-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-roberta-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
43,
63,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-roberta-hinglish-small\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 4617 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.04299965500831604,
0.11114983260631561,
-0.00624728761613369,
0.0421704538166523,
0.10934340208768845,
0.023500284180045128,
0.1436261236667633,
0.08840030431747437,
0.0053031365387141705,
0.09671217948198318,
0.023494131863117218,
0.08862143754959106,
0.011694014072418213,
0.021742548793554306,
0.022344980388879776,
-0.27055439352989197,
0.018942037597298622,
-0.04646943137049675,
0.030394652858376503,
0.07407721132040024,
0.10660477727651596,
-0.08381592482328415,
0.07416592538356781,
0.025453368201851845,
-0.04686211422085762,
-0.00013728013436775655,
-0.024435946717858315,
-0.02721463516354561,
0.08154233545064926,
0.06271583586931229,
0.049817584455013275,
0.008388330228626728,
0.0234826747328043,
-0.197647824883461,
0.015539737418293953,
0.06198085471987724,
-0.012699178420007229,
0.07063364237546921,
0.033082835376262665,
-0.039381857961416245,
0.17827077209949493,
-0.056599684059619904,
0.05720410495996475,
0.06756390631198883,
-0.11830534040927887,
-0.0476149246096611,
-0.05782841518521309,
-0.025085225701332092,
0.13977405428886414,
0.10599702596664429,
-0.06367610394954681,
0.11340661346912384,
-0.06098010390996933,
0.0704573541879654,
0.09556538611650467,
-0.2675437927246094,
-0.034813474863767624,
0.04747316241264343,
0.05581113323569298,
0.02822544239461422,
-0.10158906877040863,
0.014761943370103836,
-0.010848239064216614,
0.029659101739525795,
0.06562994420528412,
-0.05417950823903084,
0.07399383932352066,
-0.010169235989451408,
-0.10744760185480118,
-0.003982218913733959,
0.16917376220226288,
0.041292376816272736,
-0.0253613144159317,
-0.17822833359241486,
-0.05854949355125427,
0.03371883183717728,
-0.06325121968984604,
-0.03887461498379707,
0.028042716905474663,
0.043751154094934464,
-0.008879365399479866,
-0.08914767950773239,
-0.11799988150596619,
-0.019657708704471588,
-0.06999646872282028,
0.005328570958226919,
-0.024550626054406166,
-0.04729054123163223,
-0.006004764698445797,
0.058089129626750946,
-0.10904234647750854,
-0.11188441514968872,
-0.041989296674728394,
-0.016297796741127968,
-0.12619206309318542,
-0.04094928875565529,
-0.05117800831794739,
-0.12160201370716095,
0.04141368344426155,
0.14202308654785156,
0.07762256264686584,
0.007924683392047882,
-0.011206750757992268,
0.05672311782836914,
0.018772199749946594,
0.18490782380104065,
-0.0452057421207428,
-0.0821334645152092,
-0.04472002759575844,
0.021138884127140045,
-0.002269146963953972,
-0.01880136877298355,
-0.04715497046709061,
-0.0024081815499812365,
0.011803992092609406,
0.06497090309858322,
0.053709883242845535,
0.048849932849407196,
-0.05621346831321716,
-0.033420465886592865,
0.07576186209917068,
-0.10665275901556015,
0.029470710083842278,
0.009567619301378727,
-0.04608305171132088,
0.02019544690847397,
0.07493572682142258,
-0.0072970204055309296,
-0.0654422864317894,
0.03230777755379677,
-0.1113613024353981,
-0.012573054991662502,
-0.06325749307870865,
-0.13317231833934784,
-0.007746517658233643,
0.000011279114914941601,
-0.03153166174888611,
-0.10588947683572769,
-0.1295386552810669,
-0.08342703431844711,
0.02079503796994686,
-0.048171572387218475,
0.001788581721484661,
-0.1238313764333725,
-0.005099281668663025,
0.012728855945169926,
-0.0031723391730338335,
-0.05371328070759773,
-0.011545134708285332,
0.0032513595651835203,
-0.04515724629163742,
0.055992115288972855,
0.04428144544363022,
0.043293822556734085,
-0.12526880204677582,
0.028403451666235924,
-0.13139955699443817,
0.14971299469470978,
-0.03342674300074577,
0.09573375433683395,
-0.15274624526500702,
0.012742644175887108,
0.000006256403139559552,
0.058952342718839645,
0.007844456471502781,
0.1486487239599228,
-0.20246674120426178,
-0.06679652631282806,
0.12725301086902618,
-0.05104566365480423,
-0.10690639913082123,
0.11206835508346558,
-0.01980958878993988,
0.13674402236938477,
0.11015214025974274,
0.10946206003427505,
0.13981273770332336,
-0.05194283649325371,
-0.011159567162394524,
0.0285955760627985,
-0.05242384597659111,
0.10975692421197891,
0.04470451548695564,
-0.07330352067947388,
0.09808269888162613,
-0.009258910082280636,
-0.038042448461055756,
0.00033362896647304296,
-0.0036315571051090956,
-0.06362558156251907,
0.022722378373146057,
-0.039103977382183075,
0.03923482447862625,
-0.03319419175386429,
0.0003350577608216554,
0.01442219503223896,
-0.0992538258433342,
0.058682601898908615,
0.08139733225107193,
-0.0641079843044281,
0.0124561982229352,
-0.09329941123723984,
0.016302350908517838,
-0.0034563892986625433,
0.016789674758911133,
-0.19774241745471954,
-0.10647496581077576,
0.016058210283517838,
0.01972063258290291,
0.11527926474809647,
0.0720113143324852,
0.0562591478228569,
0.032124392688274384,
0.0003593603032641113,
-0.014609280042350292,
0.05192463845014572,
-0.0266515351831913,
-0.10339184105396271,
-0.09749291092157364,
0.007162181194871664,
-0.03090745583176613,
0.08230263739824295,
-0.1240430697798729,
0.015909437090158463,
0.02365114912390709,
0.03478390350937843,
0.05600687861442566,
-0.02860911376774311,
0.0014816878829151392,
-0.024534784257411957,
-0.0153461042791605,
-0.022205837070941925,
0.04319262132048607,
0.008433748036623001,
-0.14323432743549347,
0.09187187254428864,
-0.21221773326396942,
-0.1324108988046646,
0.0694156065583229,
0.021680695936083794,
-0.06352393329143524,
-0.06688486039638519,
-0.015489017590880394,
0.0027965775225311518,
-0.026409830898046494,
-0.05950714275240898,
0.19261567294597626,
0.09189808368682861,
0.10924266278743744,
-0.03227425739169121,
-0.014770520851016045,
-0.040960099548101425,
-0.03971273452043533,
-0.0341043546795845,
0.10787970572710037,
-0.029407359659671783,
-0.13448628783226013,
0.03968338668346405,
0.07054482400417328,
-0.06455538421869278,
0.10307367891073227,
0.0021547761280089617,
-0.08007138222455978,
-0.055790193378925323,
0.03513446822762489,
0.054392315447330475,
-0.007702625822275877,
-0.08378899842500687,
0.009472505189478397,
0.06336935609579086,
0.014013892039656639,
0.011361056007444859,
-0.05951665714383125,
0.04657341539859772,
0.05840054154396057,
0.0038272482343018055,
0.09387725591659546,
0.030608806759119034,
-0.012707866728305817,
0.05974208563566208,
0.021205320954322815,
0.005492651369422674,
-0.06496679037809372,
-0.04821282997727394,
-0.11252565681934357,
0.18024897575378418,
-0.10895711183547974,
-0.20699122548103333,
-0.1684010624885559,
-0.00244613247923553,
-0.03933020308613777,
0.03568825125694275,
0.07936878502368927,
-0.06420393288135529,
-0.08099397271871567,
-0.07687030732631683,
0.07981627434492111,
0.07357682287693024,
-0.03618086129426956,
-0.013436303474009037,
0.043219514191150665,
0.008776392787694931,
-0.11580083519220352,
-0.014084030874073505,
-0.008079553954303265,
-0.07435858249664307,
-0.0029027618002146482,
-0.017770890146493912,
0.07159863412380219,
0.12526944279670715,
0.05568099021911621,
-0.014328844845294952,
-0.008321421220898628,
0.2117495834827423,
-0.09677980095148087,
0.04386216029524803,
0.1877141296863556,
0.018264560028910637,
0.05872553959488869,
0.10219532251358032,
0.02885817363858223,
-0.06293756514787674,
0.054489538073539734,
0.058621201664209366,
-0.02441750280559063,
-0.1405567079782486,
-0.1101088896393776,
-0.07313933223485947,
0.021856172010302544,
0.14011646807193756,
0.030492467805743217,
0.01739482581615448,
0.06496348977088928,
-0.025057921186089516,
0.01673542521893978,
0.07230883836746216,
0.11232540756464005,
0.11070076376199722,
-0.015613501891493797,
0.10539950430393219,
-0.04609844088554382,
-0.06173507496714592,
0.0663825124502182,
-0.008932585828006268,
0.14716710150241852,
0.02444441057741642,
0.19403019547462463,
0.0679357647895813,
-0.030830780044198036,
-0.01804175041615963,
0.08166995644569397,
-0.02989455685019493,
-0.007997970096766949,
-0.024157952517271042,
-0.09339290112257004,
-0.01782863214612007,
0.10088703781366348,
0.0806383490562439,
-0.03438715636730194,
-0.03857718035578728,
0.0808611586689949,
0.11204458773136139,
0.13281214237213135,
0.09904147684574127,
-0.25177159905433655,
-0.06225542724132538,
0.026103947311639786,
-0.0700301080942154,
-0.06454060226678848,
-0.006022228859364986,
0.0414777472615242,
-0.11213502287864685,
0.026770543307065964,
-0.01230156421661377,
0.10593900084495544,
-0.08551857620477676,
0.030760828405618668,
-0.07042761147022247,
0.048242297023534775,
-0.00988322589546442,
0.07707658410072327,
-0.2215639054775238,
0.10607288032770157,
0.04182732477784157,
0.04992033913731575,
-0.05402613431215286,
0.027773715555667877,
0.0669533982872963,
0.02296251617372036,
0.1723637878894806,
-0.031200749799609184,
0.0001440967171220109,
-0.02128043957054615,
-0.07924298197031021,
-0.006146443076431751,
0.05178503692150116,
-0.13108010590076447,
0.09263159334659576,
-0.044345248490571976,
-0.026479579508304596,
-0.02293214201927185,
0.008134141564369202,
-0.033413540571928024,
-0.1607491374015808,
0.012549193575978279,
0.02990906499326229,
0.016133075580000877,
-0.01309574767947197,
0.006704299245029688,
0.026568671688437462,
0.20268340408802032,
-0.10460762679576874,
-0.052310097962617874,
-0.1292799562215805,
0.006959476508200169,
0.09761320054531097,
-0.10131684690713882,
0.007819915190339088,
-0.00896978285163641,
0.1566067487001419,
-0.05397077277302742,
-0.05296594277024269,
0.06903832405805588,
-0.0632796660065651,
-0.05331476032733917,
-0.04270334169268608,
0.09691525995731354,
0.056191422045230865,
0.062029145658016205,
0.04458698630332947,
0.06316279619932175,
-0.05203693360090256,
-0.0982731506228447,
-0.07765170931816101,
0.08203189820051193,
0.0003635805333033204,
0.0727342963218689,
-0.09050624817609787,
-0.04422694817185402,
-0.10514329373836517,
0.042576633393764496,
0.19393271207809448,
0.21504156291484833,
-0.0687926858663559,
0.058216895908117294,
0.12233331054449081,
-0.07718697935342789,
-0.2348816990852356,
-0.06646820902824402,
0.038070496171712875,
0.052746739238500595,
0.07778240740299225,
-0.12637895345687866,
0.07419475167989731,
0.05802333354949951,
-0.014714600518345833,
-0.051603272557258606,
-0.24536024034023285,
-0.14645826816558838,
0.13338744640350342,
0.010697601363062859,
-0.04652852565050125,
-0.08849450200796127,
-0.060019202530384064,
-0.0752609595656395,
-0.034198153764009476,
0.0646807998418808,
-0.039184246212244034,
0.09845976531505585,
0.038680583238601685,
0.058403998613357544,
0.06449130922555923,
0.005164553876966238,
0.1382836550474167,
0.06781858205795288,
0.03953556343913078,
-0.04000093787908554,
-0.007062805350869894,
0.07660438120365143,
-0.08792168647050858,
0.16244493424892426,
-0.08176898956298828,
0.03336874023079872,
-0.11853733658790588,
-0.02972007729113102,
-0.030043233186006546,
0.019834518432617188,
-0.03934592753648758,
-0.055124420672655106,
-0.032010145485401154,
0.03540061041712761,
0.07639728486537933,
0.0006518326699733734,
0.02991260588169098,
-0.0950721725821495,
0.022670593112707138,
0.16422173380851746,
0.142496719956398,
0.03219551220536232,
-0.1599513441324234,
0.025622721761465073,
0.011497994884848595,
0.06228313967585564,
-0.1491885483264923,
0.06675685942173004,
0.0751892477273941,
-0.010950052179396152,
0.14382751286029816,
0.015664489939808846,
-0.08155816793441772,
0.008398927748203278,
0.0667780339717865,
-0.05841291695833206,
-0.17300643026828766,
-0.037950027734041214,
0.005057114642113447,
-0.1380334347486496,
-0.047040559351444244,
0.1581878364086151,
-0.014986471273005009,
0.012802069075405598,
0.043001044541597366,
0.040480952709913254,
-0.03871787711977959,
0.12300749868154526,
-0.027528535574674606,
0.060740113258361816,
-0.058581702411174774,
0.06061026081442833,
0.08781911432743073,
-0.08252711594104767,
0.015358720906078815,
0.12603099644184113,
-0.07235056906938553,
-0.10589421540498734,
-0.06909909844398499,
0.10870610177516937,
-0.11227215081453323,
0.03538366034626961,
-0.04925244674086571,
-0.06253987550735474,
0.016807014122605324,
0.021416978910565376,
0.04778585955500603,
0.05896223336458206,
-0.08503373712301254,
-0.027521979063749313,
-0.09085572510957718,
0.06996558606624603,
0.06591232866048813,
0.017097655683755875,
-0.02706548385322094,
0.07797016203403473,
-0.03560935705900192,
0.006070384755730629,
-0.023214612156152725,
-0.04246066138148308,
-0.0606195367872715,
-0.003146021394059062,
-0.038967400789260864,
-0.011291936039924622,
-0.10616029798984528,
-0.01034233346581459,
0.033063847571611404,
0.044865138828754425,
-0.01736883819103241,
-0.0167409535497427,
-0.052211977541446686,
-0.06997096538543701,
-0.05561874061822891,
0.1066027358174324,
-0.13102726638317108,
0.005287178326398134,
0.036129824817180634,
-0.08002457022666931,
0.08757080137729645,
-0.02016061171889305,
-0.013419191353023052,
0.025573406368494034,
-0.03325895592570305,
-0.02696513757109642,
0.03001539781689644,
0.035003047436475754,
0.06699507683515549,
-0.07787076383829117,
0.005143837537616491,
-0.051093652844429016,
0.01884295977652073,
0.012039845809340477,
0.03697114437818527,
-0.1006661057472229,
0.02848280407488346,
-0.0251464881002903,
-0.005059000104665756,
-0.1094878539443016,
0.03600220009684563,
0.010210816748440266,
0.03490826115012169,
0.15532176196575165,
-0.04826392978429794,
0.07479674369096756,
-0.13465909659862518,
0.010114702396094799,
0.01253109984099865,
-0.03504125401377678,
0.07881255447864532,
-0.12376381456851959,
0.058242809027433395,
-0.04348558932542801,
0.0417991578578949,
-0.0006393126677721739,
0.06536763161420822,
0.06018625572323799,
0.05579064041376114,
0.015291107818484306,
0.010882425121963024,
0.052722856402397156,
0.06369630247354507,
-0.011543067172169685,
-0.05021223425865173,
0.0418282188475132,
0.017449986189603806,
-0.014599030837416649,
0.06192018836736679,
0.06495598703622818,
0.02978052943944931,
0.09381815791130066,
0.06395217031240463,
-0.003487933659926057,
-0.10493573546409607,
0.03331229090690613,
-0.052960123866796494,
0.06066488102078438,
-0.02951204776763916,
0.03987649083137512,
0.17891055345535278,
-0.1550983041524887,
0.10556641221046448,
0.012388478964567184,
-0.05718476325273514,
-0.07517777383327484,
-0.15801608562469482,
-0.07263926416635513,
-0.056884657591581345,
-0.011937967501580715,
-0.12578339874744415,
-0.0065723261795938015,
-0.01081228069961071,
0.006102949380874634,
-0.006887882947921753,
0.13869734108448029,
-0.11832553148269653,
-0.09976931661367416,
0.08365784585475922,
-0.027982641011476517,
0.05021858215332031,
0.022008338943123817,
0.03311089426279068,
0.020453469827771187,
0.04923013225197792,
0.058808717876672745,
0.047004036605358124,
0.04477790370583534,
0.03820611536502838,
-0.08769027143716812,
-0.09021531790494919,
-0.009348914958536625,
0.0031154663302004337,
-0.04710295796394348,
0.08157564699649811,
0.03898579627275467,
-0.06908614188432693,
-0.013206850737333298,
0.22826159000396729,
-0.09053952246904373,
-0.08180072903633118,
-0.18009869754314423,
0.1990937739610672,
0.04148130863904953,
0.03682912513613701,
-0.04798857495188713,
-0.08735247701406479,
-0.008018331602215767,
0.15676221251487732,
0.1642189770936966,
-0.08473414927721024,
0.016886474564671516,
0.022892583161592484,
0.01654638908803463,
-0.01050423365086317,
0.024643631651997566,
0.04273950308561325,
0.2060413807630539,
-0.05284157767891884,
0.08834390342235565,
-0.0022113663144409657,
-0.06062690541148186,
-0.06259431689977646,
0.0767432227730751,
0.02419222891330719,
0.036024946719408035,
-0.007966711185872555,
0.12356921285390854,
-0.03523954376578331,
-0.06572891771793365,
-0.02202727645635605,
-0.08402620255947113,
-0.10912303626537323,
-0.02852480672299862,
0.011198854073882103,
0.033537186682224274,
0.11144206672906876,
0.01825772039592266,
-0.04093093425035477,
0.10357119143009186,
-0.02406051941215992,
-0.06585504114627838,
-0.03891664743423462,
0.015810195356607437,
-0.021438447758555412,
0.1552361100912094,
-0.006774090696126223,
-0.043690018355846405,
0.1248617172241211,
0.0064840842969715595,
-0.0663871318101883,
0.08273006975650787,
0.04308054596185684,
-0.06821359694004059,
0.13627833127975464,
0.08288291841745377,
-0.021239671856164932,
0.09065347164869308,
0.08112017065286636,
-0.20118200778961182,
0.05271844565868378,
-0.055689357221126556,
-0.014215067960321903,
-0.06042493134737015,
0.041725777089595795,
-0.07079732418060303,
0.12655432522296906,
0.17182037234306335,
-0.02136457897722721,
-0.014589179307222366,
0.005031139589846134,
0.012150338850915432,
0.026226602494716644,
0.06568785756826401,
-0.05706195533275604,
-0.08769252151250839,
0.000830118777230382,
0.010855170898139477,
0.0017652582610026002,
-0.30105623602867126,
-0.0973278358578682,
0.01154777780175209,
-0.007459268905222416,
-0.024447591975331306,
0.1263418048620224,
0.08159323781728745,
-0.010170362889766693,
-0.03364693373441696,
-0.22857171297073364,
0.03887106850743294,
0.10944411903619766,
-0.11667106300592422,
-0.0976913645863533
] |
null | null |
sentence-transformers
|
# aditeyabaral/sentencetransformer-xlm-roberta-base
This is a [sentence-transformers](https://www.SBERT.net) model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
<!--- Describe your model here -->
## Usage (Sentence-Transformers)
Using this model becomes easy when you have [sentence-transformers](https://www.SBERT.net) installed:
```
pip install -U sentence-transformers
```
Then you can use the model like this:
```python
from sentence_transformers import SentenceTransformer
sentences = ["This is an example sentence", "Each sentence is converted"]
model = SentenceTransformer('aditeyabaral/sentencetransformer-xlm-roberta-base')
embeddings = model.encode(sentences)
print(embeddings)
```
## Usage (HuggingFace Transformers)
Without [sentence-transformers](https://www.SBERT.net), you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
```python
from transformers import AutoTokenizer, AutoModel
import torch
#Mean Pooling - Take attention mask into account for correct averaging
def mean_pooling(model_output, attention_mask):
token_embeddings = model_output[0] #First element of model_output contains all token embeddings
input_mask_expanded = attention_mask.unsqueeze(-1).expand(token_embeddings.size()).float()
return torch.sum(token_embeddings * input_mask_expanded, 1) / torch.clamp(input_mask_expanded.sum(1), min=1e-9)
# Sentences we want sentence embeddings for
sentences = ['This is an example sentence', 'Each sentence is converted']
# Load model from HuggingFace Hub
tokenizer = AutoTokenizer.from_pretrained('aditeyabaral/sentencetransformer-xlm-roberta-base')
model = AutoModel.from_pretrained('aditeyabaral/sentencetransformer-xlm-roberta-base')
# Tokenize sentences
encoded_input = tokenizer(sentences, padding=True, truncation=True, return_tensors='pt')
# Compute token embeddings
with torch.no_grad():
model_output = model(**encoded_input)
# Perform pooling. In this case, mean pooling.
sentence_embeddings = mean_pooling(model_output, encoded_input['attention_mask'])
print("Sentence embeddings:")
print(sentence_embeddings)
```
## Evaluation Results
<!--- Describe how your model was evaluated -->
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: [https://seb.sbert.net](https://seb.sbert.net?model_name=aditeyabaral/sentencetransformer-xlm-roberta-base)
## Training
The model was trained with the parameters:
**DataLoader**:
`torch.utils.data.dataloader.DataLoader` of length 9234 with parameters:
```
{'batch_size': 16, 'sampler': 'torch.utils.data.sampler.RandomSampler', 'batch_sampler': 'torch.utils.data.sampler.BatchSampler'}
```
**Loss**:
`sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss`
Parameters of the fit()-Method:
```
{
"epochs": 10,
"evaluation_steps": 0,
"evaluator": "NoneType",
"max_grad_norm": 1,
"optimizer_class": "<class 'transformers.optimization.AdamW'>",
"optimizer_params": {
"lr": 2e-05
},
"scheduler": "WarmupLinear",
"steps_per_epoch": null,
"warmup_steps": 100,
"weight_decay": 0.01
}
```
## Full Model Architecture
```
SentenceTransformer(
(0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: XLMRobertaModel
(1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False})
)
```
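
The same two-module stack can also be assembled by hand with `sentence_transformers.models`; the sketch below assumes `xlm-roberta-base` as the underlying checkpoint, which the printed architecture suggests but the card does not state explicitly. Loading the published model id directly is equivalent in practice.

```python
from sentence_transformers import SentenceTransformer, models

# Transformer module: XLM-RoBERTa backbone, 512-token sequences as printed above
word_embedding_model = models.Transformer('xlm-roberta-base', max_seq_length=512)

# Pooling module: mean pooling over token embeddings (768-dimensional output)
pooling_model = models.Pooling(
    word_embedding_model.get_word_embedding_dimension(),
    pooling_mode_mean_tokens=True,
    pooling_mode_cls_token=False,
    pooling_mode_max_tokens=False,
)

model = SentenceTransformer(modules=[word_embedding_model, pooling_model])
print(model)  # should reproduce the architecture shown above
```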
## Citing & Authors
<!--- Describe where people can find more information -->
|
{"tags": ["sentence-transformers", "feature-extraction", "sentence-similarity", "transformers"], "pipeline_tag": "sentence-similarity"}
|
sentence-similarity
|
aditeyabaral/sentencetransformer-xlm-roberta-base
|
[
"sentence-transformers",
"pytorch",
"xlm-roberta",
"feature-extraction",
"sentence-similarity",
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#sentence-transformers #pytorch #xlm-roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us
|
# aditeyabaral/sentencetransformer-xlm-roberta-base
This is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.
## Usage (Sentence-Transformers)
Using this model becomes easy when you have sentence-transformers installed:
Then you can use the model like this:
## Usage (HuggingFace Transformers)
Without sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.
## Evaluation Results
For an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL
## Training
The model was trained with the parameters:
DataLoader:
'URL.dataloader.DataLoader' of length 9234 with parameters:
Loss:
'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss'
Parameters of the fit()-Method:
## Full Model Architecture
## Citing & Authors
|
[
"# aditeyabaral/sentencetransformer-xlm-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
"TAGS\n#sentence-transformers #pytorch #xlm-roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n",
"# aditeyabaral/sentencetransformer-xlm-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.",
"## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:",
"## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.",
"## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL",
"## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:",
"## Full Model Architecture",
"## Citing & Authors"
] |
[
46,
62,
38,
64,
29,
78,
5,
6
] |
[
"passage: TAGS\n#sentence-transformers #pytorch #xlm-roberta #feature-extraction #sentence-similarity #transformers #endpoints_compatible #region-us \n# aditeyabaral/sentencetransformer-xlm-roberta-base\n\nThis is a sentence-transformers model: It maps sentences & paragraphs to a 768 dimensional dense vector space and can be used for tasks like clustering or semantic search.## Usage (Sentence-Transformers)\n\nUsing this model becomes easy when you have sentence-transformers installed:\n\n\n\nThen you can use the model like this:## Usage (HuggingFace Transformers)\nWithout sentence-transformers, you can use the model like this: First, you pass your input through the transformer model, then you have to apply the right pooling-operation on-top of the contextualized word embeddings.## Evaluation Results\n\n\n\nFor an automated evaluation of this model, see the *Sentence Embeddings Benchmark*: URL## Training\nThe model was trained with the parameters:\n\nDataLoader:\n\n'URL.dataloader.DataLoader' of length 9234 with parameters:\n\n\nLoss:\n\n'sentence_transformers.losses.CosineSimilarityLoss.CosineSimilarityLoss' \n\nParameters of the fit()-Method:## Full Model Architecture## Citing & Authors"
] |
[
-0.04945223033428192,
0.14511147141456604,
-0.00609963946044445,
0.05453749746084213,
0.12288752943277359,
0.029074087738990784,
0.1335906833410263,
0.09533077478408813,
0.01673322170972824,
0.09464097023010254,
0.005577178206294775,
0.11061614006757736,
0.0036950523499399424,
0.03976291045546532,
0.019873006269335747,
-0.2288055419921875,
0.016340767964720726,
-0.04746033996343613,
0.050942178815603256,
0.08198750764131546,
0.09664928168058395,
-0.0846119374036789,
0.06754066050052643,
0.018913254141807556,
-0.052401017397642136,
-0.0006301040411926806,
-0.01668727397918701,
-0.018307680264115334,
0.08394859731197357,
0.05461598560214043,
0.04642846807837486,
0.006716558244079351,
0.024984687566757202,
-0.20398667454719543,
0.015498802997171879,
0.0549238920211792,
-0.010276935994625092,
0.06102898344397545,
0.02578457072377205,
-0.04393776133656502,
0.17350055277347565,
-0.06470149010419846,
0.048336632549762726,
0.06041080504655838,
-0.10569357126951218,
-0.061365678906440735,
-0.07118511199951172,
-0.014338580891489983,
0.14078088104724884,
0.105154849588871,
-0.059086140245199203,
0.11697672307491302,
-0.056422363966703415,
0.07825388759374619,
0.10688287764787674,
-0.27656689286231995,
-0.022426804527640343,
0.030335163697600365,
0.058967918157577515,
0.022528810426592827,
-0.09910530596971512,
0.010591118596494198,
-0.012606971897184849,
0.020227497443556786,
0.06415533274412155,
-0.06435814499855042,
0.03210107237100601,
-0.009905376471579075,
-0.10415322333574295,
-0.00259291916154325,
0.1772136390209198,
0.04248787835240364,
-0.03852427005767822,
-0.17412392795085907,
-0.05184245854616165,
0.024201232939958572,
-0.05318956449627876,
-0.024946877732872963,
0.029501821845769882,
0.033533964306116104,
0.0019865359645336866,
-0.08287487924098969,
-0.11609829217195511,
-0.023403475061058998,
-0.06983412802219391,
0.02541206032037735,
-0.018929338082671165,
-0.04447529464960098,
0.0069488766603171825,
0.0758357122540474,
-0.09273811429738998,
-0.10362306982278824,
-0.045115794986486435,
-0.008946040645241737,
-0.12283287942409515,
-0.03151353821158409,
-0.048520464450120926,
-0.09639175981283188,
0.04509367421269417,
0.14671672880649567,
0.08614784479141235,
0.00477669108659029,
-0.0031980606727302074,
0.05078405514359474,
0.019454598426818848,
0.18993821740150452,
-0.03855625167489052,
-0.09262305498123169,
-0.03291609138250351,
0.028048459440469742,
0.0050928848795592785,
-0.02030928246676922,
-0.040375180542469025,
-0.0034746171440929174,
0.02692725881934166,
0.07745108753442764,
0.05419034883379936,
0.04635019227862358,
-0.0727442279458046,
-0.024912778288125992,
0.060765236616134644,
-0.11829369515180588,
0.03561946377158165,
0.013874728232622147,
-0.05608393996953964,
0.011166376061737537,
0.09515766054391861,
-0.012079416774213314,
-0.0685620978474617,
0.018935872241854668,
-0.10978731513023376,
-0.00405557407066226,
-0.0678877979516983,
-0.14113806188106537,
-0.0051033073104918,
-0.02119174599647522,
-0.03349830210208893,
-0.09912315011024475,
-0.14147645235061646,
-0.09497741609811783,
0.009237001650035381,
-0.028873294591903687,
0.00033722614170983434,
-0.12718717753887177,
-0.006904129404574633,
0.014329551719129086,
-0.013340041041374207,
-0.0591527558863163,
-0.015473873354494572,
0.011280390433967113,
-0.03635556995868683,
0.05002262815833092,
0.03520156070590019,
0.04435690492391586,
-0.11629383265972137,
0.03327659144997597,
-0.10302793979644775,
0.14472448825836182,
-0.042136140167713165,
0.08418269455432892,
-0.14322197437286377,
0.005984300747513771,
0.031874723732471466,
0.058209747076034546,
0.018952691927552223,
0.1441064178943634,
-0.21397541463375092,
-0.05919691175222397,
0.12245932221412659,
-0.045410871505737305,
-0.09847939014434814,
0.09083018451929092,
-0.021721767261624336,
0.1331203132867813,
0.11186845600605011,
0.09804736822843552,
0.15129375457763672,
-0.05842047557234764,
-0.03437194973230362,
0.020662670955061913,
-0.040832214057445526,
0.08344769477844238,
0.03757159784436226,
-0.0627613514661789,
0.09615707397460938,
0.001837644842453301,
-0.07522042095661163,
-0.0015448472695425153,
-0.006673561874777079,
-0.06391678005456924,
0.01656990684568882,
-0.03934517502784729,
0.04622185602784157,
-0.03161059319972992,
-0.007408936507999897,
0.004147752188146114,
-0.10198234021663666,
0.0846901535987854,
0.08403895795345306,
-0.06901554763317108,
0.010139426216483116,
-0.09968863427639008,
0.030736872926354408,
-0.007515506818890572,
0.015443215146660805,
-0.2031417042016983,
-0.1326814740896225,
0.013900735415518284,
0.0037468706723302603,
0.10487420111894608,
0.07461104542016983,
0.04671621695160866,
0.025792330503463745,
0.012653633952140808,
-0.023270199075341225,
0.027777161449193954,
-0.02699393220245838,
-0.09668926149606705,
-0.09321027994155884,
0.0032676137052476406,
-0.030861789360642433,
0.08078870922327042,
-0.12108136713504791,
0.016509855166077614,
0.02073824591934681,
0.020842622965574265,
0.04600837081670761,
-0.03540161997079849,
0.004105671774595976,
-0.010849488899111748,
-0.013670225627720356,
-0.020662514492869377,
0.0496571846306324,
0.009533368982374668,
-0.14397330582141876,
0.08576549589633942,
-0.21778357028961182,
-0.12137145549058914,
0.07313662767410278,
0.040666550397872925,
-0.05917466804385185,
-0.04369345307350159,
-0.01837686263024807,
0.004122762940824032,
-0.021092452108860016,
-0.056557707488536835,
0.1884635090827942,
0.08832055330276489,
0.10633758455514908,
-0.02834300324320793,
-0.02233823947608471,
-0.04739063233137131,
-0.03417271375656128,
-0.03198990598320961,
0.10946105420589447,
-0.017602697014808655,
-0.10440513491630554,
0.036150362342596054,
0.07423085719347,
-0.07078272104263306,
0.09033171832561493,
0.002339291851967573,
-0.07600411772727966,
-0.06413432210683823,
0.041248857975006104,
0.04446423053741455,
-0.0010854376014322042,
-0.08932964503765106,
0.007419532630592585,
0.06522148102521896,
0.008029933087527752,
0.012748329900205135,
-0.07750679552555084,
0.040328942239284515,
0.04746372252702713,
-0.007284592371433973,
0.07800066471099854,
0.03617478534579277,
-0.0038696639239788055,
0.05694984644651413,
0.016264185309410095,
0.0075569250620901585,
-0.06273813545703888,
-0.05041419342160225,
-0.11366710811853409,
0.18032994866371155,
-0.1231813132762909,
-0.23077896237373352,
-0.1713324934244156,
0.0070687588304281235,
-0.05032587796449661,
0.0191147830337286,
0.077789306640625,
-0.05620177090167999,
-0.08804312348365784,
-0.07215800881385803,
0.06978629529476166,
0.09060126543045044,
-0.031747374683618546,
-0.006015986204147339,
0.03532526642084122,
0.012579744681715965,
-0.13124273717403412,
-0.018115412443876266,
-0.011860044673085213,
-0.0787385031580925,
-0.00020690218661911786,
0.0018022585427388549,
0.08563239127397537,
0.12545275688171387,
0.046208109706640244,
-0.019765742123126984,
0.002925169887021184,
0.20579971373081207,
-0.08417913317680359,
0.03329821676015854,
0.19571000337600708,
0.004192677792161703,
0.06346829235553741,
0.08897583931684494,
0.023941079154610634,
-0.06917326152324677,
0.05671531707048416,
0.06739285588264465,
-0.02449747733771801,
-0.14981018006801605,
-0.10607762634754181,
-0.07253551483154297,
0.0024271581787616014,
0.11468974500894547,
0.033766571432352066,
-0.010408352129161358,
0.05313991755247116,
-0.008913875557482243,
-0.005644088611006737,
0.06025983765721321,
0.10852817445993423,
0.12388907372951508,
-0.02044663392007351,
0.10775548964738846,
-0.05611070990562439,
-0.05222456157207489,
0.0628751590847969,
0.006937950849533081,
0.1482894867658615,
0.01558433473110199,
0.17537006735801697,
0.09008017927408218,
-0.025320490822196007,
-0.025073129683732986,
0.08157078921794891,
-0.02924712561070919,
0.006630579475313425,
-0.010401532053947449,
-0.09005380421876907,
-0.03098405711352825,
0.08180668950080872,
0.0679275318980217,
-0.023272477090358734,
-0.047625474631786346,
0.0755603164434433,
0.10941499471664429,
0.12417140603065491,
0.09663364291191101,
-0.2553131878376007,
-0.053152523934841156,
0.028050681576132774,
-0.06550434231758118,
-0.06192946434020996,
-0.02071494236588478,
0.04179117828607559,
-0.11302869766950607,
0.023611484095454216,
-0.003948533907532692,
0.10149119794368744,
-0.10525991022586823,
0.013801898807287216,
-0.051897082477808,
0.03528020530939102,
0.000023986283849808387,
0.06897648423910141,
-0.1986633837223053,
0.08844270557165146,
0.03880905359983444,
0.055503133684396744,
-0.037036407738924026,
0.027501540258526802,
0.061384979635477066,
0.007188515737652779,
0.17197643220424652,
-0.02909846603870392,
0.019529933109879494,
-0.03378046303987503,
-0.07896015793085098,
0.0025523288641124964,
0.0488918274641037,
-0.1216203048825264,
0.1091160774230957,
-0.04664308577775955,
-0.022676922380924225,
-0.0222780779004097,
0.00006250551814446226,
-0.0337127260863781,
-0.1543387621641159,
0.011401008814573288,
0.03339781612157822,
0.008953811600804329,
-0.01674158126115799,
-0.006949601694941521,
0.0009874863317236304,
0.2291739583015442,
-0.1254379004240036,
-0.0634780153632164,
-0.1225099265575409,
-0.006142313592135906,
0.10068781673908234,
-0.10088779777288437,
0.01197745930403471,
-0.00625963881611824,
0.1438382863998413,
-0.04091258719563484,
-0.05224265158176422,
0.05767027661204338,
-0.05759571120142937,
-0.05985359475016594,
-0.03961635380983353,
0.10230137407779694,
0.05467040836811066,
0.05172431468963623,
0.046361371874809265,
0.05553246662020683,
-0.04913587495684624,
-0.10939902812242508,
-0.0808277279138565,
0.11574967205524445,
0.002240711124613881,
0.0793975293636322,
-0.10333909839391708,
-0.061686091125011444,
-0.10098165273666382,
0.047456223517656326,
0.19435684382915497,
0.20895223319530487,
-0.0725681409239769,
0.05860469862818718,
0.09640654176473618,
-0.0898529514670372,
-0.22705894708633423,
-0.05199607461690903,
0.03916267678141594,
0.060960374772548676,
0.061783984303474426,
-0.11731737107038498,
0.07754848897457123,
0.05722997710108757,
-0.010517027229070663,
-0.021005230024456978,
-0.2565304934978485,
-0.14527283608913422,
0.12340263277292252,
0.028351059183478355,
-0.03121284395456314,
-0.09066218137741089,
-0.05643494799733162,
-0.06609085202217102,
-0.06140407547354698,
0.08873894065618515,
-0.030823780223727226,
0.10506954044103622,
0.024928448721766472,
0.07367028295993805,
0.06499122083187103,
0.0037574509624391794,
0.13490134477615356,
0.07968045771121979,
0.04680381715297699,
-0.03759371116757393,
-0.009416197426617146,
0.08034653961658478,
-0.09300072491168976,
0.16901300847530365,
-0.08017458021640778,
0.03852732479572296,
-0.14187434315681458,
-0.0332554429769516,
-0.031850963830947876,
0.027901319786906242,
-0.04330670088529587,
-0.04356604069471359,
-0.021876264363527298,
0.03139002248644829,
0.06827225536108017,
0.003956337459385395,
0.005461147055029869,
-0.08836324512958527,
0.015871338546276093,
0.14742648601531982,
0.1343413144350052,
0.06190139055252075,
-0.17341594398021698,
0.02896069549024105,
0.011893995106220245,
0.052139636129140854,
-0.13742491602897644,
0.06894131749868393,
0.09051382541656494,
0.00255907466635108,
0.14243313670158386,
0.014662342146039009,
-0.07322247326374054,
0.017879867926239967,
0.0771026462316513,
-0.06556127220392227,
-0.16481709480285645,
-0.0254945307970047,
-0.026646040380001068,
-0.1396407186985016,
-0.03379109874367714,
0.1581648588180542,
-0.0007490115240216255,
0.007845654152333736,
0.03774179518222809,
0.04005851224064827,
-0.03154029697179794,
0.12854737043380737,
-0.014226019382476807,
0.057530634105205536,
-0.06141611933708191,
0.059080664068460464,
0.06962529569864273,
-0.06436745077371597,
0.015372802503407001,
0.11208388954401016,
-0.07676653563976288,
-0.09768203645944595,
-0.06679948419332504,
0.11120518296957016,
-0.11703319847583771,
0.027900198474526405,
-0.05937640741467476,
-0.05497018247842789,
0.01614176295697689,
0.026437463238835335,
0.05541779845952988,
0.06878113746643066,
-0.07747706770896912,
-0.03858130797743797,
-0.0794357880949974,
0.07945867627859116,
0.07454931735992432,
0.015806037932634354,
-0.03836702182888985,
0.07847677916288376,
-0.026143573224544525,
0.01645699515938759,
-0.01615479215979576,
-0.04198134317994118,
-0.05850302055478096,
-0.0026468581054359674,
-0.04795091226696968,
-0.010007391683757305,
-0.10220424830913544,
-0.0022477193269878626,
0.03178684040904045,
0.042491890490055084,
-0.021854525431990623,
-0.006469447165727615,
-0.05367107689380646,
-0.08242408186197281,
-0.05052804574370384,
0.1022828221321106,
-0.140973761677742,
-0.005514400079846382,
0.03509870171546936,
-0.08441361039876938,
0.09181051701307297,
-0.01939406991004944,
-0.006384850479662418,
0.036030855029821396,
-0.026556793600320816,
-0.020528092980384827,
0.04039366543292999,
0.039185378700494766,
0.06781837344169617,
-0.08904500305652618,
0.018226057291030884,
-0.05020946264266968,
0.024845873937010765,
0.00992319080978632,
0.05473821237683296,
-0.10937769711017609,
0.015782929956912994,
-0.026737693697214127,
0.001048718229867518,
-0.10502374172210693,
0.04002575948834419,
0.03179379180073738,
0.0553218275308609,
0.1642524152994156,
-0.04092844948172569,
0.07522210478782654,
-0.12664178013801575,
0.004472408909350634,
0.010099169798195362,
-0.026108885183930397,
0.06233355402946472,
-0.11579614877700806,
0.05877254903316498,
-0.04349933937191963,
0.04131750017404556,
0.0019013487035408616,
0.07025454938411713,
0.04606463015079498,
0.04913749918341637,
0.003904361044988036,
0.007097404915839434,
0.04905211925506592,
0.05003246292471886,
-0.005860069766640663,
-0.0368402823805809,
0.033774882555007935,
0.011641253717243671,
-0.06258939951658249,
0.07804390043020248,
0.06865216046571732,
0.03670740872621536,
0.09743113815784454,
0.06617512553930283,
-0.014852041378617287,
-0.12671975791454315,
0.02070564404129982,
-0.06903143227100372,
0.06563626974821091,
-0.035366252064704895,
0.06648590415716171,
0.15586169064044952,
-0.1539984494447708,
0.10499685257673264,
0.02237490564584732,
-0.056878142058849335,
-0.07495788484811783,
-0.1408679187297821,
-0.07615332305431366,
-0.04332929104566574,
-0.0060833352617919445,
-0.12521861493587494,
-0.010765040293335915,
-0.02857445552945137,
0.0005060388357378542,
-0.01623712107539177,
0.12107443064451218,
-0.11125043034553528,
-0.10020194202661514,
0.07683185487985611,
-0.02235460840165615,
0.047624025493860245,
0.02216038852930069,
0.042455773800611496,
0.0229551512748003,
0.05791298300027847,
0.06352891027927399,
0.05274968966841698,
0.045827288180589676,
0.038237009197473526,
-0.093392513692379,
-0.08048620820045471,
-0.01495262049138546,
0.0116401556879282,
-0.026515044271945953,
0.0813671201467514,
0.04649561643600464,
-0.07036712765693665,
-0.012482994236052036,
0.21851155161857605,
-0.08121803402900696,
-0.0717691034078598,
-0.16700631380081177,
0.19866548478603363,
0.04014457017183304,
0.032167475670576096,
-0.03501593694090843,
-0.08639184385538101,
-0.0034876589197665453,
0.1409943550825119,
0.14481371641159058,
-0.07981424778699875,
0.019904552027583122,
0.020506687462329865,
0.012936222366988659,
-0.011280098930001259,
0.031054440885782242,
0.05760343372821808,
0.2232082486152649,
-0.041332539170980453,
0.08032338321208954,
-0.013213234022259712,
-0.052495356649160385,
-0.04077047482132912,
0.07286164909601212,
0.009500286541879177,
0.032922204583883286,
-0.02164936438202858,
0.11408104002475739,
-0.03140407055616379,
-0.07051071524620056,
-0.006392324343323708,
-0.08899124711751938,
-0.12180499732494354,
-0.024609895423054695,
0.005743464920669794,
0.017185857519507408,
0.09938827902078629,
0.017052337527275085,
-0.0419401116669178,
0.12833575904369354,
-0.029223749414086342,
-0.07586102187633514,
-0.03818010166287422,
0.012450847774744034,
-0.011474912986159325,
0.1604883074760437,
-0.007406061049550772,
-0.023475974798202515,
0.12442126125097275,
0.010858826339244843,
-0.07358136028051376,
0.09476935118436813,
0.038647692650556564,
-0.07113487273454666,
0.13792183995246887,
0.06609218567609787,
-0.018631890416145325,
0.08840776234865189,
0.08054415136575699,
-0.15950486063957214,
0.047986481338739395,
-0.0669059231877327,
-0.021034540608525276,
-0.08197959512472153,
0.03957930579781532,
-0.08292482793331146,
0.1308508962392807,
0.1758018285036087,
-0.02414499595761299,
-0.010861659422516823,
0.0011695175198838115,
0.013885974884033203,
0.020159168168902397,
0.07273823767900467,
-0.04473203048110008,
-0.08960048854351044,
0.014803014695644379,
0.01590266078710556,
0.0054568336345255375,
-0.29838985204696655,
-0.09872817993164062,
0.010792700573801994,
-0.016451425850391388,
-0.01930578052997589,
0.12725788354873657,
0.06476631760597229,
-0.0058643631637096405,
-0.03273540735244751,
-0.2304689735174179,
0.03636899217963219,
0.11218360811471939,
-0.1227734163403511,
-0.10058657079935074
] |
null | null |
transformers
|
T5 model
This is a sentence-transformers model
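
The card does not include a usage example; below is a minimal sketch with the standard `transformers` seq2seq API. The `"paraphrase: "` prefix is a common convention for T5 paraphrasers and is an assumption here, not something this card documents.

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

tokenizer = AutoTokenizer.from_pretrained("aditi2222/t5-paraphrase")
model = AutoModelForSeq2SeqLM.from_pretrained("aditi2222/t5-paraphrase")

text = "paraphrase: The meeting was moved to Friday because several people were unavailable."
inputs = tokenizer(text, return_tensors="pt", truncation=True)

# Beam search with several returned candidates
outputs = model.generate(**inputs, max_length=64, num_beams=5, num_return_sequences=3)
for candidate in outputs:
    print(tokenizer.decode(candidate, skip_special_tokens=True))
```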
|
{}
|
text2text-generation
|
aditi2222/t5-paraphrase
|
[
"transformers",
"pytorch",
"t5",
"text2text-generation",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
T5 model
This is a sentence-transformers model
|
[] |
[
"TAGS\n#transformers #pytorch #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n"
] |
[
52
] |
[
"passage: TAGS\n#transformers #pytorch #t5 #text2text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n"
] |
[
0.0019048319663852453,
-0.0037844888865947723,
-0.004846867639571428,
0.016280045732855797,
0.15925085544586182,
0.01139024831354618,
0.09671302884817123,
0.14897321164608002,
-0.04899326339364052,
-0.0070660836063325405,
0.1372009962797165,
0.1665002405643463,
-0.01529724895954132,
0.10409914702177048,
-0.08531556278467178,
-0.2723868191242218,
0.041050851345062256,
0.05884578078985214,
0.006763700861483812,
0.12957002222537994,
0.08201905339956284,
-0.07262634485960007,
0.09340167045593262,
-0.03144669905304909,
-0.1715334951877594,
0.06047246605157852,
0.04992982745170593,
-0.1397479921579361,
0.11693664640188217,
0.04595603793859482,
0.11842931061983109,
0.037635594606399536,
-0.05701420456171036,
-0.10396002233028412,
0.0293845534324646,
0.04007481783628464,
-0.07444078475236893,
0.06619872897863388,
0.10958369076251984,
-0.08855950087308884,
0.11609432846307755,
0.014332751743495464,
-0.01690986007452011,
0.0453108474612236,
-0.14978627860546112,
-0.0115242013707757,
-0.017286943271756172,
0.009983301162719727,
0.03019011951982975,
0.10428427159786224,
-0.025953028351068497,
0.13894023001194,
-0.11443798243999481,
0.1249191015958786,
0.1755915880203247,
-0.3218105733394623,
0.0008721072808839381,
0.07177659124135971,
0.0954747423529625,
0.08060836046934128,
-0.017075497657060623,
0.049279846251010895,
0.03689901903271675,
0.03278602287173271,
0.05441928654909134,
-0.05992512032389641,
-0.1813686043024063,
0.0644410029053688,
-0.10202427208423615,
-0.05823950096964836,
0.24249997735023499,
-0.06648484617471695,
0.08229529857635498,
-0.0315452516078949,
-0.13439735770225525,
-0.09164708107709885,
0.0146126514300704,
0.011662522330880165,
-0.05338037386536598,
0.059678707271814346,
0.014187299646437168,
-0.06957265734672546,
-0.1573067158460617,
0.007388781290501356,
-0.20604579150676727,
0.12037379294633865,
-0.0022145791444927454,
0.04438640549778938,
-0.21971583366394043,
0.10112471133470535,
0.01954837702214718,
-0.10843217372894287,
0.07814245671033859,
-0.08621292561292648,
0.01956222951412201,
-0.009294086135923862,
-0.09783979505300522,
-0.1523585468530655,
0.05506528168916702,
0.06974567472934723,
-0.016469081863760948,
-0.0013621869729831815,
-0.05712024122476578,
0.08282897621393204,
0.026742184534668922,
0.09264246374368668,
-0.039046403020620346,
-0.03138374537229538,
0.026040101423859596,
-0.10890994966030121,
0.004530361853539944,
-0.08059261739253998,
-0.16648244857788086,
-0.08678217977285385,
0.08933352679014206,
0.06936588138341904,
0.0427776537835598,
0.10737014561891556,
-0.01597854308784008,
-0.017108382657170296,
0.02555423602461815,
-0.08640097081661224,
0.002975356997922063,
-0.0034396217670291662,
0.010684369131922722,
0.11860962957143784,
0.037888817489147186,
0.004787639249116182,
-0.1508452594280243,
0.05129704251885414,
-0.08456986397504807,
-0.004201768897473812,
-0.0386526882648468,
-0.11637408286333084,
0.02888437733054161,
-0.11346439272165298,
0.0019459343748167157,
-0.1819058209657669,
-0.10794739425182343,
0.013547050766646862,
-0.003563873004168272,
-0.02874688245356083,
-0.05685725063085556,
-0.015953734517097473,
-0.060097914189100266,
0.07545679062604904,
-0.07323034107685089,
0.033851344138383865,
-0.04320577159523964,
0.10142723470926285,
-0.05439102277159691,
0.08872391283512115,
-0.15116755664348602,
0.08348871767520905,
-0.11385006457567215,
-0.026207635179162025,
-0.06486699730157852,
0.05204048007726669,
0.037419021129608154,
0.10208529978990555,
-0.023686395958065987,
-0.04558229446411133,
-0.09794006496667862,
0.05766747146844864,
-0.010321461595594883,
0.17991861701011658,
-0.1119103655219078,
-0.08101113885641098,
0.2097437083721161,
-0.0516032800078392,
-0.14293880760669708,
0.08471720665693283,
0.014890666119754314,
0.04416865482926369,
0.05551055818796158,
0.21729257702827454,
0.03978389874100685,
-0.025182973593473434,
0.06714358180761337,
0.11787192523479462,
-0.10201745480298996,
-0.07721184939146042,
0.013209090568125248,
-0.014080026187002659,
-0.07248388230800629,
0.03355661779642105,
0.10408741980791092,
0.07307033240795135,
-0.04470960423350334,
-0.03571781888604164,
-0.06436226516962051,
-0.008570763282477856,
0.11809982359409332,
-0.001294085755944252,
0.13646534085273743,
-0.07103265821933746,
-0.04472484812140465,
0.027163831517100334,
-0.02947130613029003,
-0.02112656459212303,
0.059094879776239395,
-0.005023777950555086,
0.13337215781211853,
-0.03037242405116558,
0.03793719783425331,
-0.20518949627876282,
-0.08568139374256134,
-0.027907684445381165,
0.1735931932926178,
0.008581404574215412,
0.14691676199436188,
0.05126412957906723,
-0.038134533911943436,
-0.01040117908269167,
-0.006090906914323568,
0.12786434590816498,
0.009149910882115364,
-0.08664630353450775,
-0.04733043536543846,
0.0470752976834774,
-0.06807634979486465,
-0.042599860578775406,
-0.06205863505601883,
0.030012013390660286,
0.03637655824422836,
0.12896181643009186,
0.013993069529533386,
0.06803536415100098,
-0.008032101206481457,
0.02921360358595848,
-0.10367264598608017,
0.013655728660523891,
0.08631832152605057,
-0.013092545792460442,
-0.05325587838888168,
0.23344485461711884,
-0.22350485622882843,
0.23208631575107574,
0.21852712333202362,
-0.2824490964412689,
-0.0005818685167469084,
-0.036813754588365555,
-0.03419727459549904,
0.020097149536013603,
0.03740953654050827,
-0.051420778036117554,
0.03739655017852783,
-0.024677753448486328,
0.1943303644657135,
-0.06214848905801773,
-0.04623418301343918,
-0.003677424043416977,
-0.051297854632139206,
-0.028216710314154625,
0.05521993711590767,
0.05055946111679077,
-0.14452490210533142,
0.17969056963920593,
0.26677438616752625,
0.015687527135014534,
0.19574841856956482,
0.005315606482326984,
-0.04260627552866936,
0.07293140143156052,
-0.017192890867590904,
-0.05518830940127373,
-0.08852692693471909,
-0.1755826324224472,
-0.030800864100456238,
0.08786051720380783,
0.043464839458465576,
0.10033265501260757,
-0.11051545292139053,
-0.0307454951107502,
0.011240042746067047,
0.006229858845472336,
-0.0198612529784441,
0.1188352108001709,
0.0916038304567337,
0.14202114939689636,
-0.0043418025597929955,
-0.005152907222509384,
0.10298898071050644,
0.026397770270705223,
-0.10556304454803467,
0.17032410204410553,
-0.14493925869464874,
-0.3398903012275696,
-0.14305217564105988,
-0.1283554881811142,
-0.029550181701779366,
0.0481751449406147,
0.126591756939888,
-0.100038543343544,
-0.015225457958877087,
-0.041716817766427994,
0.07587543874979019,
-0.08503478020429611,
0.048016101121902466,
-0.1092776209115982,
0.05474958196282387,
-0.05765443667769432,
-0.0870586484670639,
-0.04337740316987038,
-0.0012005938915535808,
-0.042839743196964264,
0.1490975320339203,
-0.09574799239635468,
0.06705930083990097,
0.20642054080963135,
-0.021416770294308662,
0.04550161957740784,
-0.0442466177046299,
0.19283124804496765,
-0.07007778435945511,
0.028251156210899353,
0.2250925600528717,
-0.032153669744729996,
0.07288595288991928,
0.15226730704307556,
-0.022663576528429985,
-0.03787560015916824,
0.04395657777786255,
-0.026733849197626114,
-0.09023230522871017,
-0.2322804033756256,
-0.11889786273241043,
-0.14082705974578857,
0.07175826281309128,
0.05754096433520317,
0.05656171590089798,
0.14769874513149261,
0.056594591587781906,
-0.0090337498113513,
0.03087019920349121,
-0.009781209751963615,
0.07582470029592514,
0.2039855271577835,
-0.029816200956702232,
0.1504884958267212,
-0.0629919245839119,
-0.11681561917066574,
0.09275535494089127,
0.06313134729862213,
0.10160941630601883,
0.02251110039651394,
0.026960816234350204,
0.010898066684603691,
0.09511017054319382,
0.13752858340740204,
0.1379387527704239,
0.03932773321866989,
-0.006873026490211487,
-0.02523285523056984,
-0.03255249559879303,
0.00002126315848727245,
0.054425422102212906,
0.045209161937236786,
-0.1523430347442627,
-0.09001755714416504,
-0.10640548169612885,
0.08357469737529755,
0.08635000139474869,
0.10505597293376923,
-0.22463750839233398,
0.023784343153238297,
0.07206699997186661,
-0.03994642570614815,
-0.11989019811153412,
0.07098391652107239,
0.05299854651093483,
-0.1024707555770874,
0.046546246856451035,
-0.031229954212903976,
0.11318814754486084,
0.022051602602005005,
0.10550439357757568,
-0.053203485906124115,
-0.09618035703897476,
0.009638491086661816,
0.10481701791286469,
-0.30516061186790466,
0.20729410648345947,
-0.007429636083543301,
-0.09295950829982758,
-0.11187880486249924,
-0.018728306517004967,
0.0025576867628842592,
0.12164662778377533,
0.07349345833063126,
0.001384903327561915,
-0.06238449364900589,
-0.07514331489801407,
0.018887696787714958,
0.005249361507594585,
0.13861951231956482,
-0.00704901572316885,
0.013407570309937,
-0.061070434749126434,
-0.0075938948430120945,
0.011643931269645691,
0.0650859847664833,
0.0014231993118301034,
-0.17192824184894562,
0.07567143440246582,
0.05458882823586464,
0.04693356528878212,
0.01592378132045269,
-0.033653389662504196,
-0.10869722068309784,
0.19578181207180023,
-0.017877740785479546,
-0.08899964392185211,
-0.1214602142572403,
-0.048978541046381,
0.06916724890470505,
-0.0666830986738205,
0.05096784606575966,
-0.0720483809709549,
0.02991739846765995,
-0.06392006576061249,
-0.2284102737903595,
0.12943212687969208,
-0.07299447804689407,
-0.04307695850729942,
-0.0322282649576664,
0.15847639739513397,
-0.12298990786075592,
0.01999419741332531,
0.01866958849132061,
0.02402704581618309,
-0.12511111795902252,
-0.06121746823191643,
-0.011848983354866505,
-0.028766747564077377,
0.07283125072717667,
0.03457988426089287,
-0.08438634872436523,
-0.06576003134250641,
-0.0019198559457436204,
-0.0033527102787047625,
0.3267287611961365,
0.10421230643987656,
-0.08351656049489975,
0.1654823273420334,
0.06374907493591309,
-0.07078107446432114,
-0.32191118597984314,
-0.07873771339654922,
-0.10027248412370682,
-0.005710241850465536,
0.01002343650907278,
-0.1268543303012848,
0.04427546262741089,
-0.029947733506560326,
-0.0038497543428093195,
0.08380930125713348,
-0.24521243572235107,
-0.09818269312381744,
0.13724389672279358,
-0.020412998273968697,
0.3158648908138275,
-0.12522555887699127,
-0.07609208673238754,
-0.03624487668275833,
-0.109134241938591,
0.17277181148529053,
-0.1096419245004654,
0.09867527335882187,
-0.026042291894555092,
0.12853428721427917,
0.06134048104286194,
-0.04277019575238228,
0.06854512542486191,
-0.01833241991698742,
0.008027022704482079,
-0.13450272381305695,
-0.06482996791601181,
0.07512287050485611,
-0.030992954969406128,
0.047261811792850494,
-0.07413084805011749,
0.042952511459589005,
-0.1277935802936554,
-0.01882348023355007,
-0.11093898862600327,
0.061318669468164444,
0.022534433752298355,
-0.0656307116150856,
-0.017191549763083458,
-0.06914012879133224,
0.025176333263516426,
-0.01658732071518898,
0.19708415865898132,
-0.06335504353046417,
0.1823943704366684,
0.1869804412126541,
0.09760983288288116,
-0.10386087000370026,
0.04452233761548996,
-0.032749176025390625,
-0.06730343401432037,
0.07338140159845352,
-0.14253178238868713,
0.052002448588609695,
0.10644328594207764,
-0.050541721284389496,
0.05814075469970703,
0.1106620654463768,
0.014361315406858921,
-0.013961630873382092,
0.15362074971199036,
-0.2562159597873688,
0.005441658664494753,
-0.0962069109082222,
-0.04770717769861221,
0.03046395815908909,
0.03032153844833374,
0.18230471014976501,
0.01120354700833559,
-0.026954354718327522,
-0.005538483150303364,
-0.0018120072782039642,
-0.053923096507787704,
0.05512459948658943,
0.036821864545345306,
0.03323718532919884,
-0.11189843714237213,
0.06060293689370155,
0.05815809965133667,
-0.1448555886745453,
0.029695017263293266,
0.19576966762542725,
-0.11733770370483398,
-0.13482601940631866,
-0.0004756708804052323,
0.0677732452750206,
-0.14123328030109406,
-0.009141461923718452,
-0.05016467347741127,
-0.11345820873975754,
0.08574269711971283,
0.19972220063209534,
0.05710255727171898,
0.08909596502780914,
-0.033956222236156464,
-0.058453518897295,
-0.029195280745625496,
0.010194899514317513,
0.011630593799054623,
0.028619157150387764,
-0.10208519548177719,
0.13118597865104675,
-0.04724050313234329,
0.16715489327907562,
-0.09716197103261948,
-0.04702884331345558,
-0.152531698346138,
0.002426156075671315,
-0.1466960608959198,
-0.06943263113498688,
-0.05659385770559311,
-0.06354662030935287,
-0.009821423329412937,
-0.02434639073908329,
-0.05047421529889107,
-0.05024503916501999,
-0.12260215729475021,
0.011571573093533516,
-0.05621323361992836,
0.04525769129395485,
-0.07972045242786407,
-0.013484488241374493,
0.055132120847702026,
-0.030423389747738838,
0.11408451944589615,
0.08748376369476318,
-0.11662473529577255,
0.10743166506290436,
-0.09839223325252533,
-0.12656080722808838,
0.09486260265111923,
0.018033714964985847,
0.058434270322322845,
0.09707959741353989,
0.00891159288585186,
0.05598178133368492,
0.04158338904380798,
0.04411861300468445,
0.009987978264689445,
-0.11124832183122635,
0.0380651019513607,
-0.052996713668107986,
-0.13563407957553864,
-0.06263524293899536,
-0.026792006567120552,
0.030286112800240517,
0.01089276373386383,
0.10767155885696411,
-0.051219791173934937,
0.10564509779214859,
-0.07189621031284332,
0.019965166226029396,
-0.01300421915948391,
-0.1647084802389145,
-0.045782022178173065,
-0.07341710478067398,
0.03664537891745567,
-0.009205256588757038,
0.20625713467597961,
0.077974833548069,
0.02349081262946129,
0.037151604890823364,
0.08264945447444916,
0.0019355815602466464,
0.016970688477158546,
0.21777592599391937,
0.06962968409061432,
-0.07049708813428879,
-0.11852742731571198,
0.05963381752371788,
0.01887577399611473,
0.06965018808841705,
0.15576517581939697,
0.056794699281454086,
-0.011962602846324444,
0.11193615943193436,
-0.024064647033810616,
-0.0068133813329041,
-0.12810677289962769,
-0.16301329433918,
-0.004318809602409601,
0.09310556203126907,
-0.052356306463479996,
0.04415218159556389,
0.17003647983074188,
-0.018104281276464462,
0.0336272232234478,
-0.03094765916466713,
-0.05335594341158867,
-0.16834230720996857,
-0.14789271354675293,
-0.07995127886533737,
-0.11087332665920258,
-0.02718210592865944,
-0.10139597207307816,
0.08172602951526642,
0.06748189777135849,
0.05592411756515503,
-0.0499156229197979,
0.11055595427751541,
0.05528182536363602,
-0.11725020408630371,
0.07114271819591522,
-0.027335863560438156,
0.09812980890274048,
-0.009088918566703796,
-0.006258330307900906,
-0.08307218551635742,
0.010819291695952415,
-0.03894173353910446,
0.049928583204746246,
-0.04747631028294563,
0.008504346944391727,
-0.16299764811992645,
-0.10957249253988266,
-0.04291011765599251,
0.0614963173866272,
-0.019686607643961906,
0.1551779955625534,
0.01387507189065218,
-0.03461948782205582,
0.013804392889142036,
0.2583024799823761,
-0.09972049295902252,
-0.0851818099617958,
-0.041016120463609695,
0.2084883451461792,
0.061222467571496964,
0.07373743504285812,
-0.02859536185860634,
-0.031048838049173355,
-0.11416899412870407,
0.34497207403182983,
0.31482934951782227,
-0.07161965221166611,
0.0398726724088192,
0.016563279554247856,
0.030142735689878464,
0.11637284606695175,
0.14022967219352722,
0.0916035920381546,
0.23573338985443115,
-0.0693872794508934,
-0.010092688724398613,
-0.022344037890434265,
0.0073167746886610985,
-0.10196781903505325,
0.13043533265590668,
0.043621718883514404,
-0.09041506797075272,
-0.026318738237023354,
0.07670903205871582,
-0.21243104338645935,
0.1389734297990799,
-0.08875156939029694,
-0.19405682384967804,
-0.05883593484759331,
0.01516592688858509,
0.1496192216873169,
-0.016031023114919662,
0.09138526022434235,
-0.020647617056965828,
-0.0722113624215126,
0.029204411432147026,
0.01666286215186119,
-0.19871759414672852,
0.033451639115810394,
0.06371412426233292,
-0.11542545258998871,
-0.029661864042282104,
-0.017291566357016563,
0.02199423685669899,
0.08295346796512604,
0.07082275301218033,
-0.04837881773710251,
0.0535416342318058,
0.005592434201389551,
-0.019829990342259407,
0.03503140062093735,
0.05262354016304016,
0.009726877324283123,
-0.10966355353593826,
0.06299593299627304,
-0.17635416984558105,
0.03804607316851616,
-0.036695461720228195,
-0.016934243962168694,
-0.00767596485093236,
-0.0276106558740139,
-0.03190142661333084,
0.07071686536073685,
0.10571801662445068,
-0.015258674509823322,
-0.003450299147516489,
-0.0712389275431633,
-0.058415062725543976,
-0.014903778210282326,
-0.11116986721754074,
-0.09979519993066788,
-0.09476316720247269,
-0.09682852029800415,
0.10881722718477249,
-0.0060325018130242825,
-0.2213381677865982,
0.01311418879777193,
-0.08191841095685959,
0.050175897777080536,
-0.1892065852880478,
0.10583088546991348,
0.10422021895647049,
-0.007532334886491299,
0.0014266044599935412,
-0.007911108434200287,
0.04282023757696152,
0.09190382063388824,
-0.1298302263021469,
-0.07897894084453583
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-uncased-finetuned-ner
This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the conll2003 dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
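For illustration only, the listed values correspond roughly to the `TrainingArguments` sketch below. This is a hypothetical reconstruction, not the original training script: the output directory name is assumed, and dataset preparation, tokenization, and the token-classification head are omitted.
```python
# Hypothetical sketch: the hyperparameters above expressed as TrainingArguments.
# Anything not listed in the card (e.g. output_dir) is a placeholder.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="distilbert-base-uncased-finetuned-ner",  # assumed name
    learning_rate=2e-5,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    num_train_epochs=3,
)
```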
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.12.1
- Tokenizers 0.10.3
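As a minimal inference sketch (not part of the auto-generated card; it assumes the checkpoint is published under the repository id above and follows the standard CoNLL-2003 entity labels):
```python
# Hypothetical usage sketch: tag named entities with the fine-tuned checkpoint.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="adityavithaldas/distilbert-base-uncased-finetuned-ner",
    aggregation_strategy="simple",  # merge sub-word pieces into whole entities
)

print(ner("Hugging Face is based in New York City."))
```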
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["conll2003"]}
|
token-classification
|
adityavithaldas/distilbert-base-uncased-finetuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"token-classification",
"generated_from_trainer",
"dataset:conll2003",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us
|
# distilbert-base-uncased-finetuned-ner
This model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
### Framework versions
- Transformers 4.10.2
- Pytorch 1.9.0+cu102
- Datasets 1.12.1
- Tokenizers 0.10.3
|
[
"# distilbert-base-uncased-finetuned-ner\n\nThis model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 2e-05\n- train_batch_size: 16\n- eval_batch_size: 16\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 3",
"### Framework versions\n\n- Transformers 4.10.2\n- Pytorch 1.9.0+cu102\n- Datasets 1.12.1\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n",
"# distilbert-base-uncased-finetuned-ner\n\nThis model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 2e-05\n- train_batch_size: 16\n- eval_batch_size: 16\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 3",
"### Framework versions\n\n- Transformers 4.10.2\n- Pytorch 1.9.0+cu102\n- Datasets 1.12.1\n- Tokenizers 0.10.3"
] |
[
65,
43,
6,
12,
8,
3,
90,
34
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #autotrain_compatible #endpoints_compatible #region-us \n# distilbert-base-uncased-finetuned-ner\n\nThis model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 2e-05\n- train_batch_size: 16\n- eval_batch_size: 16\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- num_epochs: 3### Framework versions\n\n- Transformers 4.10.2\n- Pytorch 1.9.0+cu102\n- Datasets 1.12.1\n- Tokenizers 0.10.3"
] |
[
-0.08146190643310547,
0.14809583127498627,
-0.001795605756342411,
0.09311632812023163,
0.15837648510932922,
0.029101239517331123,
0.09420067816972733,
0.10489467531442642,
-0.11266035586595535,
0.04262798652052879,
0.08107592910528183,
0.08786926418542862,
0.030960137024521828,
0.10210361331701279,
-0.03826519474387169,
-0.2550995647907257,
0.0027012473437935114,
0.022980807349085808,
-0.0638362318277359,
0.0995958149433136,
0.09999919682741165,
-0.1037067398428917,
0.07001330703496933,
0.009153407998383045,
-0.17880254983901978,
0.018308125436306,
-0.03164665773510933,
-0.04548303410410881,
0.0975453108549118,
0.011673933826386929,
0.12598873674869537,
-0.0034014428965747356,
0.13929270207881927,
-0.20172008872032166,
-0.0067142825573682785,
0.07656214386224747,
0.046907324343919754,
0.07696332782506943,
0.03799837827682495,
0.018296433612704277,
0.11083346605300903,
-0.1368635892868042,
0.10402325540781021,
0.018695248290896416,
-0.05128626525402069,
-0.11568444222211838,
-0.0748554989695549,
0.0986499935388565,
0.10702896863222122,
0.10245087742805481,
0.022268014028668404,
0.12644235789775848,
-0.08608784526586533,
0.08541848510503769,
0.15905813872814178,
-0.2592678666114807,
-0.06893527507781982,
0.07775506377220154,
0.04371587187051773,
0.0520009845495224,
-0.09833493828773499,
-0.03692211955785751,
0.03950687497854233,
0.034346289932727814,
0.10959286987781525,
-0.030830837786197662,
-0.10737749934196472,
-0.0014865959528833628,
-0.1479707658290863,
-0.011258842423558235,
0.18004998564720154,
0.03528901934623718,
-0.037097036838531494,
-0.0809793695807457,
-0.05817084014415741,
-0.09750377386808395,
-0.01916840486228466,
-0.04989326000213623,
0.03591444343328476,
-0.04655912518501282,
-0.04148517921566963,
-0.07381844520568848,
-0.07446146011352539,
-0.0560198612511158,
-0.011310365051031113,
0.09762353450059891,
0.05468680337071419,
0.007227266672998667,
-0.029738256707787514,
0.1142396554350853,
0.013819594867527485,
-0.10342010855674744,
-0.0019373312825337052,
-0.013810770586133003,
-0.06925751268863678,
-0.0703120082616806,
-0.035563886165618896,
-0.02353721112012863,
-0.017733002081513405,
0.1493411362171173,
-0.04744919016957283,
0.059549733996391296,
0.039438407868146896,
0.014684243127703667,
-0.025033561512827873,
0.15629926323890686,
-0.0558534637093544,
-0.04188092425465584,
0.004363304004073143,
0.08339037001132965,
0.005239262245595455,
-0.002726621925830841,
-0.1074804812669754,
-0.014892494305968285,
0.09199575334787369,
0.04642966017127037,
-0.04450049623847008,
0.049946147948503494,
-0.022544387727975845,
-0.04999012500047684,
0.027648594230413437,
-0.1296326220035553,
0.04662979394197464,
-0.020747000351548195,
-0.08336388319730759,
0.005407501477748156,
0.04126607999205589,
0.003790792543441057,
-0.04477429389953613,
0.10674773901700974,
-0.08903046697378159,
-0.0011874483898282051,
-0.10185373574495316,
-0.07909630239009857,
0.010313157923519611,
-0.0995243638753891,
0.0037366626784205437,
-0.0839914008975029,
-0.21653127670288086,
-0.03155429661273956,
0.06398801505565643,
-0.029945045709609985,
-0.06120103970170021,
-0.04771041125059128,
-0.06169096380472183,
-0.0006879621068947017,
-0.0038783643394708633,
0.09317902475595474,
-0.041931040585041046,
0.0719284638762474,
-0.01212789211422205,
0.019098356366157532,
0.0019372558454051614,
0.04780503734946251,
-0.0978737473487854,
0.025239985436201096,
-0.10526346415281296,
0.05780813843011856,
-0.0856308788061142,
0.03786364942789078,
-0.09908736497163773,
-0.1241861954331398,
0.004525909665971994,
-0.01872938498854637,
0.03677026927471161,
0.09972883760929108,
-0.15839773416519165,
-0.032352954149246216,
0.12245475500822067,
-0.05721678584814072,
-0.05241450294852257,
0.09768865257501602,
-0.0533183254301548,
0.03136224299669266,
0.05715746805071831,
0.13961446285247803,
0.14473114907741547,
-0.1342083364725113,
-0.013146942481398582,
0.03364548087120056,
0.04395585134625435,
-0.01662352867424488,
0.03961234539747238,
0.01076900027692318,
0.013209953904151917,
0.01627746783196926,
-0.08417117595672607,
0.011030602268874645,
-0.07215065509080887,
-0.09320897608995438,
-0.05080223083496094,
-0.10170555859804153,
0.05815718695521355,
0.04468908905982971,
0.04732261970639229,
-0.04676132649183273,
-0.10415833443403244,
0.16102999448776245,
0.13788791000843048,
-0.06752981245517731,
0.007106237579137087,
-0.06570469588041306,
0.03238894045352936,
-0.021423714235424995,
-0.027787717059254646,
-0.19144171476364136,
-0.10943926870822906,
0.026969537138938904,
-0.047312963753938675,
0.04893438145518303,
0.04346024617552757,
0.056202232837677,
0.07660277187824249,
-0.04103701934218407,
-0.028446175158023834,
-0.06688947975635529,
0.007639729417860508,
-0.10321579873561859,
-0.1715778261423111,
-0.06502863019704819,
-0.022024739533662796,
0.16108109056949615,
-0.23813730478286743,
0.029638998210430145,
-0.055227141827344894,
0.1173665001988411,
0.010049412958323956,
-0.04324936866760254,
-0.014317059889435768,
0.06829703599214554,
-0.025166986510157585,
-0.09434870630502701,
0.04828584939241409,
0.00598322506994009,
-0.06429524719715118,
-0.10049305111169815,
-0.12158571928739548,
0.0664694681763649,
0.08765517175197601,
0.02084272727370262,
-0.0920669361948967,
-0.009151491336524487,
-0.06611789017915726,
-0.05426987633109093,
-0.06088702753186226,
0.010654756799340248,
0.19670988619327545,
-0.006350489798933268,
0.13980475068092346,
-0.0462958849966526,
-0.06827540695667267,
-0.005218475591391325,
-0.005505835637450218,
-0.020611697807908058,
0.06904982775449753,
0.12078174203634262,
-0.10079747438430786,
0.10707107931375504,
0.08497243374586105,
-0.1056186780333519,
0.15016679465770721,
-0.030966337770223618,
-0.06960301846265793,
-0.011381915770471096,
-0.002645601751282811,
-0.014883347786962986,
0.10760471969842911,
-0.10932643711566925,
-0.008040313608944416,
0.026992354542016983,
0.008447138592600822,
0.043982602655887604,
-0.17502960562705994,
-0.0022738988045603037,
0.036384861916303635,
-0.028611885383725166,
0.008272535167634487,
-0.03355688974261284,
0.024523358792066574,
0.08035781979560852,
0.01534330751746893,
-0.049820948392152786,
0.03202999755740166,
0.007524873595684767,
-0.08498746901750565,
0.18369507789611816,
-0.12534521520137787,
-0.15669798851013184,
-0.13561783730983734,
0.019148534163832664,
-0.0682235136628151,
-0.019395431503653526,
0.003804225707426667,
-0.07002950459718704,
-0.053859785199165344,
-0.07906241714954376,
-0.03421882912516594,
-0.056403085589408875,
-0.011297370307147503,
0.040327075868844986,
0.011425969190895557,
0.07274428755044937,
-0.13028304278850555,
0.005361664108932018,
-0.010367575101554394,
-0.10153241455554962,
-0.008826302364468575,
0.047282055020332336,
0.12040923535823822,
0.15462744235992432,
-0.027645375579595566,
0.013492511585354805,
-0.02608504891395569,
0.20683400332927704,
-0.0563032440841198,
0.009598514065146446,
0.12471721321344376,
0.0000611964424024336,
0.0477822907269001,
0.1045018807053566,
0.03844582661986351,
-0.07755380123853683,
0.021274680271744728,
0.06653948873281479,
-0.021803438663482666,
-0.2166171371936798,
-0.06972318142652512,
-0.03601682558655739,
-0.055088240653276443,
0.11192587018013,
0.0437743254005909,
0.05349332094192505,
0.051585469394922256,
0.0008045939612202346,
0.083316370844841,
-0.02260933630168438,
0.0879497230052948,
0.11099965870380402,
0.029361426830291748,
0.1016361191868782,
-0.026253554970026016,
-0.03895045071840286,
0.05378713831305504,
0.014202367514371872,
0.2619144022464752,
-0.02198009192943573,
0.0799529179930687,
0.0349242128431797,
0.1562989056110382,
-0.027816984802484512,
0.0440683476626873,
0.0053481743671,
0.0004941458464600146,
0.015453322790563107,
-0.049920909106731415,
-0.0325307734310627,
0.03088308870792389,
-0.016141030937433243,
0.04848701134324074,
-0.09291069209575653,
0.04473377764225006,
0.019348694011569023,
0.24800384044647217,
0.017232200130820274,
-0.3101789355278015,
-0.09412704408168793,
0.00822535715997219,
-0.02137167565524578,
-0.06309983134269714,
0.023443931713700294,
0.11140970140695572,
-0.12385843694210052,
0.037131525576114655,
-0.06176725775003433,
0.08828464150428772,
-0.06029136851429939,
0.009439629502594471,
0.0772394984960556,
0.12989749014377594,
0.01743302308022976,
0.09794925153255463,
-0.21134503185749054,
0.19743067026138306,
0.018537046387791634,
0.11829916387796402,
-0.06352603435516357,
0.03580019995570183,
0.013268762268126011,
0.10902508348226547,
0.09809532761573792,
-0.0016516342293471098,
-0.005277350544929504,
-0.1820593774318695,
-0.06456374377012253,
0.026560436934232712,
0.0943533405661583,
-0.008876796811819077,
0.08139804750680923,
-0.058117449283599854,
-0.002137233968824148,
0.0590883307158947,
-0.10107475519180298,
-0.15165011584758759,
-0.1449766755104065,
0.01494687981903553,
0.018479079008102417,
-0.05889025330543518,
-0.06861931830644608,
-0.09518857300281525,
-0.04694380983710289,
0.22581639885902405,
-0.03978443518280983,
-0.05245841294527054,
-0.1319744735956192,
0.06849854439496994,
0.11016031354665756,
-0.0635087788105011,
0.02361045964062214,
0.0165388286113739,
0.10878317803144455,
0.02788202464580536,
-0.10915851593017578,
0.03639724478125572,
-0.09190215170383453,
-0.13004674017429352,
-0.04622219502925873,
0.10086057335138321,
0.06098875775933266,
0.05044117942452431,
-0.002389304805546999,
0.006059998646378517,
0.005628977902233601,
-0.09314357489347458,
-0.014966686256229877,
0.10432320833206177,
0.09979221224784851,
0.04249110817909241,
-0.11472606658935547,
0.02194996550679207,
-0.03964341804385185,
0.004599851556122303,
0.13724803924560547,
0.15663465857505798,
-0.09657507389783859,
0.05805119872093201,
0.07654774188995361,
-0.09943803399801254,
-0.190116748213768,
0.05935532599687576,
0.09700477868318558,
0.014131232164800167,
0.024262307211756706,
-0.2077115923166275,
0.1362542361021042,
0.12818829715251923,
-0.013833520002663136,
0.06135304644703865,
-0.3466412127017975,
-0.12103226035833359,
0.1088661476969719,
0.11036422848701477,
0.01430022157728672,
-0.1253771334886551,
-0.023191653192043304,
-0.021486474201083183,
-0.1478879302740097,
0.1118222028017044,
-0.05946033447980881,
0.09986919164657593,
-0.004726252052932978,
0.09576940536499023,
0.02318304404616356,
-0.051916882395744324,
0.1298658549785614,
0.043482862412929535,
0.08111673593521118,
-0.057536251842975616,
-0.00687691243365407,
0.07022972404956818,
-0.06821995973587036,
0.0754842460155487,
-0.02417600527405739,
0.07030754536390305,
-0.1353394091129303,
-0.029742391780018806,
-0.06607220321893692,
0.08186773955821991,
-0.044419243931770325,
-0.08106239885091782,
-0.05418172478675842,
0.06003955006599426,
0.06279586255550385,
-0.02710537426173687,
0.05943021550774574,
0.045120302587747574,
0.08413325250148773,
0.0895356833934784,
0.09744337201118469,
-0.02994503639638424,
-0.11852507293224335,
-0.01846739463508129,
-0.015051915310323238,
0.07344138622283936,
-0.0826757550239563,
0.01589946076273918,
0.14542627334594727,
0.04582755267620087,
0.13803650438785553,
0.0450126975774765,
-0.026947814971208572,
-0.015679895877838135,
0.026170624420046806,
-0.1328507363796234,
-0.12461444735527039,
-0.019602490589022636,
-0.0609876811504364,
-0.1228201612830162,
0.024710578843951225,
0.09846406430006027,
-0.0773724690079689,
-0.0054472326301038265,
-0.009759489446878433,
0.01841668039560318,
-0.02434251829981804,
0.18510988354682922,
0.03987913206219673,
0.055323150008916855,
-0.0762176513671875,
0.10455114394426346,
0.08500642329454422,
-0.0761105939745903,
0.044211357831954956,
0.050152380019426346,
-0.09516292810440063,
-0.030483106151223183,
0.06108713150024414,
0.1348285675048828,
-0.06441804021596909,
-0.04612848907709122,
-0.07751921564340591,
-0.08419698476791382,
0.05211213231086731,
0.09962031990289688,
0.05985748767852783,
-0.009486716240644455,
-0.06866442412137985,
0.03483804315328598,
-0.16727110743522644,
0.08779431879520416,
0.037080034613609314,
0.07905915379524231,
-0.18148300051689148,
0.1406884342432022,
0.014401931315660477,
0.050560493022203445,
-0.01670355536043644,
0.02066863514482975,
-0.09466302394866943,
-0.01894146017730236,
-0.12206925451755524,
-0.03339265286922455,
-0.03398396447300911,
0.012821135111153126,
-0.005405012518167496,
-0.02797713316977024,
-0.037296462804079056,
0.050327517092227936,
-0.061478402465581894,
-0.05085204914212227,
0.02008211798965931,
0.05191068723797798,
-0.13671165704727173,
-0.005290127359330654,
0.01923752762377262,
-0.08482725918292999,
0.05333855003118515,
0.05511074140667915,
0.024796046316623688,
0.032546091824769974,
-0.09569494426250458,
-0.014019809663295746,
0.049502260982990265,
0.049273066222667694,
0.07848934084177017,
-0.07520114630460739,
-0.007580960635095835,
-0.008746166713535786,
0.04361291974782944,
0.012768791988492012,
0.06895389407873154,
-0.1255946308374405,
-0.0064635975286364555,
-0.061425697058439255,
-0.05449838563799858,
-0.07660447061061859,
0.047134529799222946,
0.11770516633987427,
0.029129736125469208,
0.1830807626247406,
-0.07553130388259888,
0.03850521519780159,
-0.17879347503185272,
-0.03298856317996979,
-0.0010296410182490945,
-0.0365610346198082,
-0.040531374514102936,
-0.03914690017700195,
0.062216419726610184,
-0.05259295552968979,
0.11241444200277328,
0.008398903533816338,
0.07516034692525864,
0.029277212917804718,
-0.04258689284324646,
-0.018798328936100006,
0.006438420154154301,
0.17618940770626068,
0.056420646607875824,
-0.02336559258401394,
0.07633648812770844,
0.002204636810347438,
0.07064003497362137,
0.05191996321082115,
0.18798202276229858,
0.12903179228305817,
-0.07417071610689163,
0.060413509607315063,
0.06109130382537842,
-0.1117592453956604,
-0.1815410703420639,
0.08413010090589523,
-0.03535693138837814,
0.12891557812690735,
-0.04192759469151497,
0.19637368619441986,
0.09427326172590256,
-0.17211169004440308,
0.048423852771520615,
-0.04072792828083038,
-0.12458784133195877,
-0.10809888690710068,
-0.07268770039081573,
-0.07220008969306946,
-0.09316746145486832,
0.012579037807881832,
-0.12424909323453903,
0.014540637843310833,
0.06414902955293655,
0.012913956306874752,
-0.008660868741571903,
0.16243872046470642,
-0.04186015576124191,
0.02499397099018097,
0.03954997658729553,
0.012009993195533752,
-0.02721332013607025,
-0.0846107229590416,
-0.05746733024716377,
0.02045711688697338,
0.006101854611188173,
0.07847519218921661,
-0.0649527907371521,
0.0017977547831833363,
0.036564525216817856,
-0.006084653548896313,
-0.04532160237431526,
0.015956543385982513,
0.022198064252734184,
0.03328487277030945,
0.043036215007305145,
0.04080522432923317,
-0.017914896830916405,
-0.04254728928208351,
0.24938996136188507,
-0.08001498132944107,
-0.09706024080514908,
-0.12576906383037567,
0.22995567321777344,
0.04728330671787262,
-0.01978830248117447,
0.07600671797990799,
-0.09921571612358093,
-0.008694848977029324,
0.21189948916435242,
0.1804480403661728,
-0.06108429655432701,
-0.02386651188135147,
-0.010930259712040424,
-0.015039547346532345,
-0.05658823996782303,
0.12294682115316391,
0.12527087330818176,
0.08481660485267639,
-0.04769962280988693,
-0.034652478992938995,
-0.020809704437851906,
-0.025458671152591705,
-0.10542444884777069,
0.05536628141999245,
0.03047795221209526,
0.001063131378032267,
-0.020748810842633247,
0.05540074035525322,
-0.02447672374546528,
-0.15528430044651031,
0.04936685785651207,
-0.1398158222436905,
-0.16979165375232697,
-0.020420387387275696,
0.09931696951389313,
-0.04939541593194008,
0.04648103564977646,
-0.02363486774265766,
-0.020178968086838722,
0.14817284047603607,
-0.020911619067192078,
-0.0790238156914711,
-0.08042856305837631,
0.08129370957612991,
-0.03134264796972275,
0.2298598289489746,
-0.006470119580626488,
0.07439155876636505,
0.10407152026891708,
0.031485751271247864,
-0.11832696944475174,
0.04189988225698471,
0.06472544372081757,
-0.0680072084069252,
0.03798700496554375,
0.1350628286600113,
-0.05451739579439163,
0.1008085086941719,
0.026287658140063286,
-0.13247466087341309,
-0.02092050574719906,
-0.05515677481889725,
-0.029724683612585068,
-0.06929042190313339,
-0.002044711960479617,
-0.06426439434289932,
0.15357591211795807,
0.21693944931030273,
-0.017389003187417984,
-0.00022577882918994874,
-0.08826424926519394,
0.03297816589474678,
0.05294199660420418,
0.08983118087053299,
-0.034046705812215805,
-0.1866929978132248,
0.009477891027927399,
-0.04793694242835045,
0.01775253191590309,
-0.2129320651292801,
-0.09641631692647934,
0.03614700585603714,
-0.05371851474046707,
-0.06442950665950775,
0.10414151102304459,
0.0679529458284378,
0.03967719152569771,
-0.04556526616215706,
-0.06254898756742477,
-0.0648694559931755,
0.1298564225435257,
-0.1672101467847824,
-0.05435517057776451
] |
null | null | null |
♕〖𖡦الس௸اهر𖡦〗♕
|
{}
| null |
admin-63/eToro
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
〖𖡦الس௸اهر𖡦〗
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null |
transformers
|
```python
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
processor = Wav2Vec2Processor.from_pretrained("adresgezgini/Wav2Vec-tr-AG-v1")
model = Wav2Vec2ForCTC.from_pretrained("adresgezgini/Wav2Vec-tr-AG-v1")
```
The audio files ses1.mp3[1], ses2.mp3[2], and ses3.mp3[3] shared in the Files section were created by extracting a segment of roughly 1 to 1.5 minutes from open-source audiobook recordings. The model was tested on these recordings and the resulting WER values were recorded.
<div align="center">
|Audio file|WER|
| :---: | :---: |
|SES1.mp3|0.17|
|SES2.mp3|0.31|
|SES3.mp3|0.20|
</div>
[1][Sabahattin Ali - Çaydanlık | YT: Sesli Kitap Dünyası](https://www.youtube.com/watch?v=IHUfOpqw-8s)\
[2][Sabahattin Ali - Ses | YT: Sesli Kitap Dünyası](https://www.youtube.com/watch?v=XzX2wBjncOg)\
[3][Sabahattin Ali - Sıçra Köşk | YT: Sesli Kitap Dünyası](https://www.youtube.com/watch?v=SJwUaq0Nu9c)\
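
As a rough illustration of how such WER values can be computed (a hedged sketch; `jiwer` is one common choice, and the strings below are placeholders rather than the actual audiobook transcripts):
```python
# Hypothetical sketch: word error rate between a reference transcript and a
# model transcription. Requires the `jiwer` package; placeholder strings only.
from jiwer import wer

reference = "reference transcript of the audio segment"
hypothesis = "model transcription of the audio segment"

print(f"WER: {wer(reference, hypothesis):.2f}")
```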
|
{}
|
automatic-speech-recognition
|
adresgezgini/Wav2Vec2-tr-AG-v1
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us
|
The audio files ses1.mp3[1], ses2.mp3[2], and ses3.mp3[3] shared in the Files section were created by extracting a segment of roughly 1 to 1.5 minutes from open-source audiobook recordings. The model was tested on these recordings and the resulting WER values were recorded.
[1]Sabahattin Ali - Çaydanlık | YT: Sesli Kitap Dünyası
[2]Sabahattin Ali - Ses | YT: Sesli Kitap Dünyası
[3]Sabahattin Ali - Sıçra Köşk | YT: Sesli Kitap Dünyası\
|
[] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n"
] |
[
37
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #endpoints_compatible #region-us \n"
] |
[
-0.0721188485622406,
-0.028641358017921448,
-0.008131618611514568,
-0.06536999344825745,
0.10369497537612915,
-0.029162423685193062,
0.05449262633919716,
0.09215618669986725,
0.09012462943792343,
0.0024160705506801605,
0.09899672865867615,
0.19874386489391327,
0.0032586746383458376,
-0.005444767884910107,
-0.07112546265125275,
-0.21837934851646423,
0.09600707143545151,
0.055372726172208786,
0.09617701917886734,
0.10875947028398514,
0.0784798189997673,
-0.07249201834201813,
0.032563697546720505,
0.0496663972735405,
-0.13367274403572083,
0.037079449743032455,
0.058643877506256104,
-0.1467711329460144,
0.11741682142019272,
0.03624606877565384,
0.09462464600801468,
0.015703529119491577,
-0.025202931836247444,
-0.2087007611989975,
0.0028133990708738565,
-0.010769401676952839,
-0.015249740332365036,
-0.01163998618721962,
0.05165225267410278,
-0.06630266457796097,
0.009819267317652702,
0.04223077744245529,
0.007102530915290117,
0.07885675877332687,
-0.07011023163795471,
-0.16595390439033508,
0.027373118326067924,
0.04979840666055679,
0.06278723478317261,
0.09646070003509521,
-0.015308019705116749,
0.15824733674526215,
-0.10311099886894226,
0.11664288491010666,
0.11729957163333893,
-0.34086450934410095,
0.025533635169267654,
-0.018387693911790848,
0.06145255267620087,
-0.005980021320283413,
-0.023320090025663376,
0.08407841622829437,
-0.009680325165390968,
0.02594558708369732,
-0.06523970514535904,
-0.053816139698028564,
-0.17229726910591125,
0.02152976207435131,
-0.10039443522691727,
-0.05630653724074364,
0.14903028309345245,
-0.02459312416613102,
0.050798237323760986,
-0.070815309882164,
-0.06357286870479584,
-0.017195679247379303,
-0.024608483538031578,
-0.05171274393796921,
-0.05008924379944801,
0.05738884210586548,
-0.0357147753238678,
-0.03484039008617401,
-0.11432810872793198,
-0.056904442608356476,
-0.20911253988742828,
0.30520865321159363,
0.01152665913105011,
0.09113361686468124,
-0.19182562828063965,
0.018077926710247993,
-0.016760041937232018,
-0.04602903127670288,
-0.009070603176951408,
-0.03480092063546181,
-0.005194336175918579,
0.03196761757135391,
-0.09672658145427704,
-0.005024821497499943,
0.09619411826133728,
0.05404296889901161,
0.048924271017313004,
0.05579700693488121,
-0.05309535190463066,
0.08241341263055801,
-0.028520308434963226,
0.12802933156490326,
-0.022204747423529625,
0.005049536935985088,
0.019602568820118904,
-0.15963000059127808,
0.021364036947488785,
-0.046107299625873566,
-0.1059512197971344,
-0.09832640737295151,
0.05150148272514343,
0.10308951884508133,
0.007624712772667408,
0.02769755758345127,
-0.02915896475315094,
-0.005024684127420187,
-0.0010898011969402432,
-0.08071205765008926,
-0.0027757075149565935,
0.06134745106101036,
0.054733775556087494,
0.2042790800333023,
0.01401777658611536,
0.027865497395396233,
-0.13477809727191925,
0.03430946171283722,
0.029110778123140335,
0.03076641820371151,
0.06137247383594513,
-0.027149634435772896,
0.016688477247953415,
-0.10584334284067154,
0.02761022001504898,
-0.2108437865972519,
-0.038738854229450226,
0.019536681473255157,
-0.04570518061518669,
0.011210971511900425,
-0.005862658843398094,
-0.07294338941574097,
-0.02412441000342369,
0.026644060388207436,
-0.0848965272307396,
-0.02483426034450531,
-0.04335634037852287,
0.09084957093000412,
0.035603780299425125,
0.1144782155752182,
-0.13740399479866028,
0.06781121343374252,
-0.041382916271686554,
-0.024146918207406998,
0.006467015482485294,
0.0786295235157013,
-0.017164042219519615,
0.09126786887645721,
-0.07609090209007263,
-0.0363796167075634,
-0.11339505016803741,
0.061230018734931946,
-0.03372715786099434,
0.1294335424900055,
-0.10931240767240524,
-0.12374426424503326,
0.2250494360923767,
-0.0921396017074585,
-0.09642865508794785,
0.09675160050392151,
0.047799888998270035,
-0.023683663457632065,
0.09046079218387604,
0.26092445850372314,
-0.006438442971557379,
-0.1344638615846634,
0.06955351680517197,
0.11876583099365234,
-0.1670559197664261,
-0.10538151115179062,
0.01964217610657215,
-0.07074971497058868,
-0.09296547621488571,
0.021963927894830704,
0.01981767639517784,
0.060468241572380066,
-0.05301972106099129,
-0.0665108859539032,
-0.03185911104083061,
-0.06936094164848328,
0.04747062921524048,
-0.03376079350709915,
0.07992979139089584,
-0.04860096052289009,
0.00022167984570842236,
-0.040190186351537704,
0.026694048196077347,
-0.03910767287015915,
0.08643262833356857,
-0.152516707777977,
0.10339003056287766,
-0.03455405682325363,
0.02255146950483322,
-0.18151403963565826,
0.1193070039153099,
-0.021366599947214127,
0.08709456026554108,
0.044252097606658936,
0.053591787815093994,
0.10817640274763107,
-0.07318807393312454,
0.037599824368953705,
-0.03522860258817673,
0.1588101089000702,
0.05225297436118126,
-0.018174700438976288,
-0.05587159842252731,
0.034731682389974594,
-0.06348273903131485,
-0.047311894595623016,
0.009232322685420513,
-0.02524193562567234,
0.10357552021741867,
0.11902469396591187,
-0.002044686349108815,
0.023700617253780365,
-0.015857627615332603,
0.033494967967271805,
-0.003626496996730566,
0.031247051432728767,
0.08235985040664673,
-0.02272169478237629,
-0.0769941657781601,
0.25266093015670776,
-0.14026528596878052,
0.23503975570201874,
0.24047064781188965,
-0.29727834463119507,
0.048978712409734726,
0.07219430804252625,
0.01735152304172516,
0.0021673247683793306,
0.0925217941403389,
-0.05285301432013512,
0.20381318032741547,
-0.01743427850306034,
0.1389402598142624,
-0.03674538433551788,
-0.0029116040095686913,
0.03353562951087952,
-0.03337210416793823,
-0.05041591078042984,
0.04755973070859909,
0.0015957624418660998,
-0.08388978242874146,
0.0828937366604805,
0.15193380415439606,
-0.0368693470954895,
0.09280958771705627,
-0.005733860656619072,
-0.016735907644033432,
0.06162073463201523,
0.012068409472703934,
-0.032136328518390656,
-0.02572881057858467,
-0.3251122832298279,
-0.08921423554420471,
0.06767655909061432,
-0.004273096099495888,
0.1147976741194725,
-0.1401272714138031,
0.006002949085086584,
0.006024875678122044,
-0.059807486832141876,
-0.07236415892839432,
0.07770498842000961,
0.020530637353658676,
0.07419945299625397,
-0.02522803470492363,
-0.11176551133394241,
0.07386600971221924,
-0.03610837087035179,
-0.10568254441022873,
0.07786476612091064,
-0.11866384744644165,
-0.2900512218475342,
-0.14687477052211761,
-0.14070986211299896,
0.02008247748017311,
0.06637454032897949,
0.119004026055336,
-0.11709826439619064,
-0.004356969613581896,
0.037115056067705154,
0.03698491305112839,
-0.08836003392934799,
0.06865092366933823,
0.029483821243047714,
0.030205586925148964,
-0.02406148426234722,
-0.09716613590717316,
-0.03349224105477333,
-0.07106846570968628,
-0.01235450804233551,
0.08198442310094833,
-0.06806303560733795,
0.07756998389959335,
0.1869477778673172,
0.04971100017428398,
0.08263817429542542,
-0.0121439378708601,
0.12353795021772385,
-0.05943383276462555,
-0.10815918445587158,
0.17252440750598907,
-0.07307032495737076,
0.02297825925052166,
0.14187809824943542,
0.000404434947995469,
-0.07981004565954208,
-0.044941119849681854,
-0.09086789190769196,
-0.09349201619625092,
-0.19090873003005981,
-0.12945573031902313,
-0.08469346910715103,
-0.027750767767429352,
0.003936579450964928,
0.03929224982857704,
0.10053049772977829,
-0.015164372511208057,
0.0318857803940773,
-0.06527750939130783,
0.037118468433618546,
0.04925777390599251,
0.22011712193489075,
-0.035632967948913574,
0.11447834223508835,
-0.06257307529449463,
-0.11675461381673813,
0.042986106127500534,
0.060705941170454025,
0.10117717832326889,
0.15401828289031982,
0.027867771685123444,
0.005864634178578854,
0.11680784821510315,
0.1787092089653015,
0.12349317967891693,
0.057697635143995285,
-0.014458432793617249,
0.04202825948596001,
-0.031030582264065742,
-0.07545241713523865,
0.06351669877767563,
0.24637570977210999,
-0.10907962918281555,
-0.04299181327223778,
-0.17554126679897308,
0.06269893795251846,
0.17280790209770203,
0.06886234879493713,
-0.19033943116664886,
0.009430297650396824,
0.06161557883024216,
-0.08718495815992355,
-0.039426445960998535,
0.12834890186786652,
0.03255130723118782,
-0.08927353471517563,
0.08593298494815826,
0.023056067526340485,
0.06641831994056702,
-0.06849268823862076,
0.08725766092538834,
-0.10478068888187408,
-0.15477952361106873,
0.06414807587862015,
0.04578937590122223,
-0.24747368693351746,
0.2281670868396759,
-0.015506764873862267,
0.026676084846258163,
-0.07251138240098953,
-0.014630771242082119,
0.005118122790008783,
0.07661780714988708,
0.14594794809818268,
-0.005033341236412525,
-0.04946841299533844,
-0.12404008209705353,
-0.007966546341776848,
0.0572483129799366,
0.17199034988880157,
0.05051308125257492,
-0.031220808625221252,
-0.007178888190537691,
-0.06425842642784119,
-0.0023619099520146847,
-0.057564083486795425,
-0.04774671047925949,
-0.10623916983604431,
0.013821293599903584,
0.2107272744178772,
0.11030402034521103,
0.008771294727921486,
-0.014119311235845089,
-0.14217756688594818,
0.13927903771400452,
-0.20314770936965942,
-0.012101659551262856,
-0.06178198382258415,
-0.17580774426460266,
0.09098359942436218,
-0.047833316028118134,
0.06665368378162384,
-0.0246900487691164,
0.007919765077531338,
-0.05927768722176552,
-0.15633289515972137,
0.1300991028547287,
-0.11683275550603867,
-0.019420908764004707,
-0.02266664244234562,
0.2542378008365631,
-0.02868989109992981,
-0.00022344836906995624,
0.06588499993085861,
0.012667160481214523,
-0.07948637753725052,
-0.045111484825611115,
0.08735814690589905,
0.14315460622310638,
-0.08427585661411285,
0.053661324083805084,
0.03975791856646538,
-0.17768527567386627,
-0.07372710108757019,
0.034207575023174286,
0.2839866280555725,
0.07747307419776917,
-0.057250987738370895,
0.18260008096694946,
0.20878978073596954,
-0.010367152281105518,
-0.28379762172698975,
-0.15502293407917023,
-0.07944010198116302,
0.0009141949703916907,
-0.11721421778202057,
-0.09218011796474457,
0.09241478890180588,
-0.06837643682956696,
-0.05737648904323578,
0.07152894884347916,
-0.19546766579151154,
-0.0981190875172615,
0.219879612326622,
-0.034856200218200684,
0.4114883840084076,
-0.06148048862814903,
-0.1459461748600006,
-0.0555725172162056,
-0.18084096908569336,
0.0894971713423729,
-0.026558540761470795,
0.0820658728480339,
0.01966485008597374,
0.09183495491743088,
0.05033127963542938,
-0.04245033487677574,
0.10872094333171844,
0.0760914608836174,
-0.057044703513383865,
-0.05393856763839722,
-0.05787365511059761,
-0.03484024107456207,
0.016244076192378998,
0.03970678150653839,
0.029991568997502327,
0.023477301001548767,
-0.08885850012302399,
-0.06251884996891022,
-0.12129434943199158,
0.0884556919336319,
0.0841657966375351,
-0.0031620634254068136,
0.051357369869947433,
-0.14773449301719666,
-0.021893896162509918,
0.04363230988383293,
0.14964716136455536,
-0.09176138788461685,
0.07405997067689896,
0.17426884174346924,
0.14886562526226044,
-0.14664708077907562,
0.003058107104152441,
-0.05776814743876457,
-0.1011233702301979,
0.12968605756759644,
0.024770202115178108,
0.06956075131893158,
0.08249596506357193,
0.019914090633392334,
0.00811604131013155,
0.09039079397916794,
-0.020434709265828133,
0.0070288218557834625,
0.09483665227890015,
-0.15127912163734436,
-0.07125047594308853,
-0.015156523324549198,
0.017886854708194733,
0.1545657217502594,
0.14007161557674408,
0.16529449820518494,
0.02486141212284565,
-0.012096241116523743,
-0.04985383525490761,
-0.005592867266386747,
-0.14501993358135223,
0.09596604108810425,
0.056048765778541565,
0.03177822381258011,
-0.1689673811197281,
0.053186409175395966,
-0.04085535928606987,
-0.19084806740283966,
0.013390779495239258,
0.017495164647698402,
-0.11244485527276993,
-0.11561312526464462,
-0.10894180089235306,
0.02844228409230709,
-0.06521294265985489,
-0.13255275785923004,
0.043576110154390335,
-0.16649563610553741,
0.08408192545175552,
0.2183782309293747,
0.05991727486252785,
0.10767434537410736,
-0.08789479732513428,
-0.03133482486009598,
0.005640141665935516,
-0.058524247258901596,
-0.03673648089170456,
-0.011027595959603786,
-0.10944608598947525,
0.0506475605070591,
0.013274303637444973,
0.13179078698158264,
-0.09587356448173523,
-0.10142890363931656,
-0.11364098638296127,
0.09812068939208984,
-0.14064590632915497,
-0.030631106346845627,
-0.1098189726471901,
-0.03166437894105911,
0.06403834372758865,
-0.0851232185959816,
-0.03693908452987671,
0.017473148182034492,
-0.0980556383728981,
0.05528227239847183,
-0.004201426636427641,
0.001913837855681777,
-0.10883459448814392,
0.002962311264127493,
0.062244582921266556,
-0.040038544684648514,
0.12362077832221985,
0.249018132686615,
-0.14891332387924194,
0.1465185135602951,
-0.18008393049240112,
-0.19724424183368683,
0.1500670164823532,
0.01500400435179472,
0.009510787203907967,
0.02629696950316429,
0.012114698998630047,
0.12700627744197845,
0.040209777653217316,
0.028408609330654144,
0.17248576879501343,
-0.06685706973075867,
0.03851442039012909,
-0.0759325698018074,
-0.10440655797719955,
-0.034970253705978394,
-0.07160848379135132,
0.1400478631258011,
0.06202966347336769,
0.09748422354459763,
-0.03615143150091171,
0.06252597272396088,
0.02196015976369381,
0.035767797380685806,
-0.06036775931715965,
-0.10780566930770874,
-0.04740653187036514,
-0.03826800733804703,
0.042743176221847534,
-0.031614698469638824,
0.2334204912185669,
-0.10858950763940811,
0.038850028067827225,
0.011987561360001564,
-0.008462372235953808,
-0.09795255213975906,
0.03197858855128288,
0.2952418327331543,
0.1218862533569336,
-0.03296373039484024,
-0.066999152302742,
0.013009646907448769,
0.01815200038254261,
0.030443403869867325,
0.001572250621393323,
0.14926548302173615,
0.023670630529522896,
0.17915105819702148,
0.10188481956720352,
0.02828742191195488,
-0.12838032841682434,
-0.1490498036146164,
-0.09192974120378494,
0.03111591562628746,
-0.03810209035873413,
0.11369074881076813,
0.14607982337474823,
0.016350792720913887,
0.01697431318461895,
-0.004843822680413723,
-0.022071609273552895,
-0.1752617508172989,
-0.10440753400325775,
-0.08142578601837158,
-0.11838482320308685,
0.032670944929122925,
-0.014440135098993778,
0.03735579550266266,
0.05140050873160362,
0.05859667435288429,
-0.02119131200015545,
0.09585927426815033,
0.009440023452043533,
-0.06592240929603577,
0.10088648647069931,
-0.04326128214597702,
0.013741651549935341,
0.016331855207681656,
-0.03154413402080536,
-0.010452311486005783,
-0.019497450441122055,
-0.005802567582577467,
0.046686772257089615,
-0.13079345226287842,
0.010648160241544247,
-0.12401501834392548,
-0.09085725992918015,
-0.041131582111120224,
0.029086057096719742,
-0.03246871381998062,
0.11770851910114288,
0.07890281081199646,
-0.06317076832056046,
0.028930896893143654,
0.13956138491630554,
-0.11462242156267166,
-0.15169039368629456,
-0.012151491828262806,
0.19187034666538239,
0.049830734729766846,
0.14869624376296997,
-0.039655085653066635,
-0.011490479111671448,
-0.1268738955259323,
0.3134293258190155,
0.21894627809524536,
-0.029566623270511627,
0.06968390941619873,
0.012242328375577927,
0.04945865646004677,
0.05147209390997887,
0.037134476006031036,
0.14212264120578766,
0.3142167627811432,
-0.005606405436992645,
-0.0678129643201828,
-0.029410207644104958,
-0.040482621639966965,
-0.08318427950143814,
0.060821712017059326,
-0.06858836114406586,
-0.13105294108390808,
-0.042223330587148666,
0.09486715495586395,
-0.21290788054466248,
0.08625967800617218,
-0.014330721460282803,
-0.16493283212184906,
-0.025777185335755348,
0.025004224851727486,
0.12373810261487961,
0.08473566174507141,
0.059048641473054886,
-0.032053492963314056,
-0.10001970827579498,
0.0649167001247406,
0.03845933824777603,
-0.2532598376274109,
0.04978358373045921,
0.01770191080868244,
-0.07009257376194,
-0.05270304158329964,
-0.0012992133852094412,
0.13695812225341797,
0.01977551355957985,
0.1564396768808365,
0.00699559086933732,
0.1385391801595688,
-0.011141940020024776,
-0.10966970771551132,
0.0020350429695099592,
0.13195984065532684,
-0.00764200184494257,
-0.012492666952311993,
0.04399271681904793,
-0.17780189216136932,
0.0534556619822979,
-0.018362876027822495,
-0.0200018472969532,
-0.0611613392829895,
-0.025413908064365387,
-0.040704384446144104,
0.02814488857984543,
-0.029701540246605873,
-0.01489523146301508,
-0.018930602818727493,
0.009367123246192932,
-0.0017991254571825266,
0.015221191570162773,
-0.11187604814767838,
-0.1212301030755043,
-0.15346519649028778,
-0.07677175104618073,
-0.032612718641757965,
0.027920229360461235,
-0.09698854386806488,
-0.016907071694731712,
-0.0763961598277092,
0.026637034490704536,
-0.07471530884504318,
0.02430429309606552,
0.08550451695919037,
0.001998303458094597,
0.014178152196109295,
-0.058580655604600906,
0.12117161601781845,
0.1384691596031189,
-0.1377907544374466,
-0.12039846181869507
] |
null | null |
transformers
|
AdresGezgini Inc. R&D Center Turkish GPT-2 Model Trained with Turkish Wiki Corpus for 10 Epochs
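The card itself gives no usage snippet. A minimal, hedged sketch of loading this text-generation checkpoint from the Hub is shown below; the `pipeline` call and the Turkish prompt are illustrative assumptions, not part of the original card.

```python
from transformers import pipeline

# Hypothetical usage sketch: load the checkpoint by its Hub id and generate a continuation
generator = pipeline("text-generation", model="adresgezgini/turkish-gpt-2")
print(generator("Türkiye'nin başkenti", max_length=30, num_return_sequences=1)[0]["generated_text"])
```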
|
{}
|
text-generation
|
adresgezgini/turkish-gpt-2
|
[
"transformers",
"pytorch",
"tf",
"jax",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tf #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
AdresGezgini Inc. R&D Center Turkish GPT-2 Model Trained with Turkish Wiki Corpus for 10 Epochs
|
[] |
[
"TAGS\n#transformers #pytorch #tf #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
53
] |
[
"passage: TAGS\n#transformers #pytorch #tf #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
-0.018587395548820496,
0.009235069155693054,
-0.006672721356153488,
0.0247216634452343,
0.16404971480369568,
0.03665168955922127,
0.08494317531585693,
0.14217494428157806,
-0.006030709482729435,
-0.03466939181089401,
0.14348401129245758,
0.20880107581615448,
0.0007542037055827677,
0.0887284055352211,
-0.06535610556602478,
-0.27254655957221985,
0.03991684317588806,
0.05865507200360298,
-0.034872736781835556,
0.11828067898750305,
0.0853922888636589,
-0.04309149459004402,
0.09352563321590424,
-0.03240712359547615,
-0.18590562045574188,
0.034873660653829575,
0.06583712249994278,
-0.12424807250499725,
0.114150770008564,
0.07465716451406479,
0.08069240301847458,
0.03026532754302025,
-0.06766403466463089,
-0.11916866898536682,
0.030453229323029518,
0.033777687698602676,
-0.07872138917446136,
0.06237945705652237,
0.1006869301199913,
-0.08272578567266464,
0.1115427017211914,
0.0910625159740448,
-0.023898381739854813,
0.05965781584382057,
-0.1664886176586151,
-0.08628477156162262,
-0.02334817312657833,
0.023733919486403465,
0.0445893369615078,
0.09181798249483109,
-0.007505293469876051,
0.1322903037071228,
-0.08271350711584091,
0.12117785215377808,
0.14540669322013855,
-0.3139655888080597,
-0.0068994262255728245,
0.08508043736219406,
0.041594866663217545,
0.05683811753988266,
-0.02843133732676506,
0.05269335210323334,
0.035818006843328476,
0.026415947824716568,
0.03958810493350029,
-0.08417028188705444,
-0.1555609554052353,
0.049069732427597046,
-0.0943327397108078,
-0.05944015085697174,
0.2509447932243347,
-0.05545961484313011,
0.0501854307949543,
-0.0005421533714979887,
-0.10515658557415009,
-0.03906271606683731,
-0.015422471798956394,
-0.0061912015080451965,
-0.06485860794782639,
0.08013545721769333,
0.023420564830303192,
-0.07933645695447922,
-0.1266346275806427,
-0.03046495094895363,
-0.18177470564842224,
0.1645948737859726,
0.012553106062114239,
0.05671742931008339,
-0.20616136491298676,
0.10628213733434677,
-0.0033769577275961637,
-0.09879681468009949,
0.03911066800355911,
-0.094792939722538,
0.016042305156588554,
-0.01592225581407547,
-0.04252680018544197,
-0.10232851654291153,
0.07801830768585205,
0.12653349339962006,
-0.0006039236905053258,
0.026596594601869583,
-0.052716922014951706,
0.08165788650512695,
0.02173433266580105,
0.08599194139242172,
-0.025199446827173233,
-0.011269161477684975,
0.057656966149806976,
-0.1337440311908722,
-0.036188703030347824,
-0.07418659329414368,
-0.15297913551330566,
-0.038561031222343445,
0.06846030056476593,
0.08763264119625092,
0.014824950136244297,
0.10366585105657578,
-0.03770924359560013,
-0.038078200072050095,
0.051171258091926575,
-0.07160980999469757,
-0.013386998325586319,
-0.009216462261974812,
0.03198438137769699,
0.12458956986665726,
0.0007775133126415312,
0.010663283057510853,
-0.1260964572429657,
0.04695966839790344,
-0.08459614217281342,
-0.016047881916165352,
-0.029812749475240707,
-0.06690388917922974,
0.024694550782442093,
-0.08190447092056274,
0.02387191541492939,
-0.17235273122787476,
-0.1516118198633194,
0.027539696544408798,
0.012874475680291653,
-0.031920719891786575,
-0.04976843670010567,
-0.015910597518086433,
-0.04378073662519455,
0.04868490248918533,
-0.05230031535029411,
0.02232290618121624,
-0.05489083752036095,
0.10791867226362228,
-0.040533438324928284,
0.06622444093227386,
-0.11723652482032776,
0.07539934664964676,
-0.10878101736307144,
-0.018328681588172913,
-0.10386843979358673,
0.06269732862710953,
-0.008092857897281647,
0.13250373303890228,
-0.02699309028685093,
-0.02736649475991726,
-0.07961352914571762,
0.046208884567022324,
-0.03566046431660652,
0.1871132105588913,
-0.09566682577133179,
-0.11390183120965958,
0.25043168663978577,
-0.0762811228632927,
-0.15856441855430603,
0.10610592365264893,
0.014365962706506252,
0.05737365782260895,
0.08104044944047928,
0.17987224459648132,
0.05971093848347664,
-0.010156112723052502,
0.11351119726896286,
0.11731047183275223,
-0.11308007687330246,
-0.06806618720293045,
0.014471244998276234,
-0.020609837025403976,
-0.16507652401924133,
0.04534066468477249,
0.06815342605113983,
0.10172746330499649,
-0.05105290561914444,
-0.024166537448763847,
-0.03907974436879158,
0.0033343106042593718,
0.05303787812590599,
0.009095934219658375,
0.12604396045207977,
-0.05721841752529144,
-0.02539670281112194,
-0.033697471022605896,
-0.01163670513778925,
-0.019510112702846527,
0.02901282161474228,
-0.02919180691242218,
0.1230103000998497,
-0.028152547776699066,
0.06392694264650345,
-0.17871913313865662,
-0.10134267807006836,
0.008669956587255001,
0.13866862654685974,
-0.003453391371294856,
0.09542684257030487,
0.05842619761824608,
-0.030235612764954567,
-0.012885703705251217,
-0.005292550660669804,
0.14813466370105743,
-0.011163382790982723,
-0.05179668590426445,
-0.07747562974691391,
0.06794100254774094,
-0.06438424438238144,
-0.01230118703097105,
-0.061673834919929504,
0.011275854893028736,
0.06425680965185165,
0.10907597839832306,
0.02099626697599888,
0.04141668230295181,
-0.016879888251423836,
0.0044844504445791245,
-0.07885278761386871,
-0.009877598844468594,
0.08473861217498779,
-0.0015311073511838913,
-0.06001431494951248,
0.21409516036510468,
-0.152195543050766,
0.26010453701019287,
0.18940086662769318,
-0.266620934009552,
-0.016714755445718765,
-0.04282618314027786,
-0.028825299814343452,
0.013057323172688484,
0.06183205172419548,
-0.04960909113287926,
0.09395723044872284,
-0.01885410211980343,
0.18490993976593018,
-0.06393375247716904,
-0.06582887470722198,
0.011701912619173527,
-0.03894782066345215,
-0.006297817453742027,
0.07320383191108704,
0.11227662861347198,
-0.18005257844924927,
0.18851788341999054,
0.20283403992652893,
0.04603021591901779,
0.1932646632194519,
-0.01575944945216179,
-0.03242363780736923,
0.07158209383487701,
0.0006501481402665377,
-0.024882489815354347,
-0.06756759434938431,
-0.18745765089988708,
-0.019649671390652657,
0.07740837335586548,
0.0440865159034729,
0.09072300791740417,
-0.11438301205635071,
-0.049030862748622894,
-0.009960057213902473,
-0.013624361716210842,
0.01035553589463234,
0.10853960365056992,
0.046615250408649445,
0.1295192986726761,
-0.0173726137727499,
-0.023036381229758263,
0.1150643453001976,
0.020723773166537285,
-0.11082139611244202,
0.19675105810165405,
-0.1380341500043869,
-0.3465414345264435,
-0.13821224868297577,
-0.14585687220096588,
-0.027647119015455246,
0.03671838343143463,
0.10094334930181503,
-0.10175886750221252,
-0.026613259688019753,
0.00470177223905921,
0.08744572103023529,
-0.10039712488651276,
0.03365456312894821,
-0.08533965796232224,
0.03968095779418945,
-0.06820804625749588,
-0.07504430413246155,
-0.05644352361559868,
-0.009097280912101269,
-0.05988794565200806,
0.15118548274040222,
-0.11908172816038132,
0.0613362118601799,
0.18866990506649017,
0.02161281928420067,
0.05995042249560356,
-0.0424862876534462,
0.21475256979465485,
-0.10096167773008347,
0.018527474254369736,
0.18905241787433624,
-0.04323020577430725,
0.07084055244922638,
0.09976541250944138,
0.003063039854168892,
-0.08463075757026672,
0.02761751227080822,
-0.026483409106731415,
-0.093450628221035,
-0.23616331815719604,
-0.09170274436473846,
-0.13959497213363647,
0.07723906636238098,
0.05302364006638527,
0.06864593923091888,
0.17727380990982056,
0.07090119272470474,
-0.011854846030473709,
0.04935076832771301,
0.006131359376013279,
0.07400146126747131,
0.17584794759750366,
-0.011504880152642727,
0.11576478183269501,
-0.05219535157084465,
-0.12496571987867355,
0.1059778481721878,
0.05232307314872742,
0.12326698750257492,
0.05523334816098213,
0.057330843061208725,
0.008453072048723698,
0.09108683466911316,
0.12801805138587952,
0.13301251828670502,
0.0007621980621479452,
-0.02758980542421341,
-0.039580587297677994,
-0.02588338404893875,
-0.024654187262058258,
0.036357369273900986,
0.01787649281322956,
-0.15044339001178741,
-0.06188368424773216,
-0.11520031094551086,
0.07948671281337738,
0.1016545221209526,
0.061685871332883835,
-0.21284234523773193,
0.013283351436257362,
0.07026632130146027,
-0.033864691853523254,
-0.11983351409435272,
0.08205912262201309,
0.00631908793002367,
-0.13980084657669067,
0.05003947392106056,
-0.056556131690740585,
0.118854820728302,
-0.02943241409957409,
0.08068045973777771,
-0.013429328799247742,
-0.05354531481862068,
0.011414701119065285,
0.1104767918586731,
-0.32001787424087524,
0.2022586613893509,
0.003583787241950631,
-0.06378652900457382,
-0.10315663367509842,
0.009499620646238327,
0.01620020717382431,
0.12513966858386993,
0.11252368241548538,
0.007888834923505783,
-0.034427981823682785,
-0.09218779951334,
-0.004574996884912252,
0.026058072224259377,
0.12137775868177414,
-0.059205565601587296,
-0.011459864675998688,
-0.04839080199599266,
-0.008505962789058685,
-0.012690424919128418,
-0.016667617484927177,
-0.00295274774543941,
-0.16343066096305847,
0.09522031247615814,
0.005769776646047831,
0.07286826521158218,
0.008783615194261074,
-0.019443148747086525,
-0.08902609348297119,
0.2153472900390625,
-0.049982789903879166,
-0.09510941058397293,
-0.13889065384864807,
-0.047783270478248596,
0.06673209369182587,
-0.07436059415340424,
0.05038416013121605,
-0.0764365866780281,
0.010335484519600868,
-0.04258740693330765,
-0.2352316677570343,
0.13681404292583466,
-0.10346612334251404,
-0.04248093068599701,
-0.04026254266500473,
0.17514310777187347,
-0.08514899760484695,
0.008373820222914219,
0.01473949383944273,
0.006392231676727533,
-0.08654724806547165,
-0.09804212301969528,
0.025593716651201248,
-0.04274057224392891,
0.027105960994958878,
0.017844712361693382,
-0.0744776576757431,
0.018734866753220558,
-0.0243386197835207,
-0.010741151869297028,
0.3050714135169983,
0.16010499000549316,
-0.050997357815504074,
0.16546009480953217,
0.10312675684690475,
-0.07395486533641815,
-0.302263468503952,
-0.07846750319004059,
-0.09874900430440903,
-0.04178540036082268,
-0.0379960760474205,
-0.19965502619743347,
0.06873581558465958,
0.014900034293532372,
-0.0005112813669256866,
0.1617172658443451,
-0.2373679131269455,
-0.07674095034599304,
0.14306777715682983,
0.00681115360930562,
0.34116971492767334,
-0.13999654352664948,
-0.09707428514957428,
-0.022028109058737755,
-0.14441712200641632,
0.17212918400764465,
-0.04277023673057556,
0.09582416713237762,
-0.019888432696461678,
0.07581345736980438,
0.0485907681286335,
-0.03816443681716919,
0.06802710145711899,
0.010975965298712254,
0.01251327432692051,
-0.10286522656679153,
-0.029956068843603134,
0.057925477623939514,
0.014031484723091125,
0.03254792094230652,
-0.040198348462581635,
0.035232529044151306,
-0.12183618545532227,
-0.03616083413362503,
-0.09236260503530502,
0.05534573644399643,
0.03490268439054489,
-0.0806862860918045,
0.009108372963964939,
-0.044165417551994324,
-0.006434726994484663,
-0.016533998772501945,
0.1869186908006668,
-0.03747279942035675,
0.16599133610725403,
0.09659390896558762,
0.10877491533756256,
-0.14430218935012817,
0.008798886090517044,
-0.06464994698762894,
-0.061315037310123444,
0.0869099572300911,
-0.11954963207244873,
0.06653293967247009,
0.10770932585000992,
-0.037672996520996094,
0.07550397515296936,
0.11255088448524475,
-0.010753577575087547,
-0.024478616192936897,
0.1354140192270279,
-0.2620249390602112,
0.011112150736153126,
-0.10277087986469269,
-0.053603339940309525,
0.08589326590299606,
0.059543341398239136,
0.17063240706920624,
0.019094394519925117,
-0.03691435977816582,
0.002286518458276987,
-0.0032613908406347036,
-0.04297538474202156,
0.06876692175865173,
0.036201633512973785,
0.018735934048891068,
-0.1412288397550583,
0.06380559504032135,
0.019676469266414642,
-0.12987346947193146,
0.014359957538545132,
0.18374264240264893,
-0.13636545836925507,
-0.12283436208963394,
-0.01472350675612688,
0.10303474217653275,
-0.1375390589237213,
-0.010404600761830807,
-0.047937601804733276,
-0.12442086637020111,
0.08908337354660034,
0.1662478744983673,
0.057791464030742645,
0.09958991408348083,
-0.04062404856085777,
-0.031105797737836838,
-0.02718747965991497,
-0.0027079822029918432,
-0.004614054691046476,
0.026808008551597595,
-0.10010819137096405,
0.06410907208919525,
-0.028616370633244514,
0.1616012305021286,
-0.09509016573429108,
-0.058415792882442474,
-0.17012803256511688,
0.020438166335225105,
-0.0980398952960968,
-0.07545942813158035,
-0.06191256269812584,
-0.046872347593307495,
0.0024530207738280296,
-0.031492725014686584,
-0.037997931241989136,
-0.04167279973626137,
-0.1273663491010666,
0.010485270991921425,
-0.03421832621097565,
0.026860404759645462,
-0.05130409076809883,
-0.017405139282345772,
0.07838631421327591,
-0.04366683214902878,
0.14187225699424744,
0.1251930147409439,
-0.0813216045498848,
0.13244137167930603,
-0.13989607989788055,
-0.08764417469501495,
0.10466115176677704,
0.02126849815249443,
0.050429604947566986,
0.07374565303325653,
0.03287266939878464,
0.049179356545209885,
0.011608350090682507,
0.04609746113419533,
-0.007829797454178333,
-0.12268529832363129,
0.023345254361629486,
-0.02798962965607643,
-0.1538762003183365,
-0.05655300244688988,
-0.03891851752996445,
0.03750801831483841,
0.013090603053569794,
0.0927872508764267,
-0.043296340852975845,
0.10165534913539886,
-0.07603445649147034,
0.01318682637065649,
0.0051549095660448074,
-0.1842835247516632,
-0.05110134929418564,
-0.07082372158765793,
0.03012164682149887,
0.002235719934105873,
0.21606981754302979,
0.04629960283637047,
0.007445674389600754,
0.023777058348059654,
0.05448852479457855,
0.05619640275835991,
0.013472571037709713,
0.20154157280921936,
0.0974220335483551,
-0.058137644082307816,
-0.1242523193359375,
0.07596153020858765,
0.013066772371530533,
0.013343184255063534,
0.14974428713321686,
0.007691948674619198,
-0.02717658504843712,
0.07740107923746109,
-0.00910227745771408,
-0.002045026980340481,
-0.06961663067340851,
-0.13770876824855804,
-0.009465648792684078,
0.07519535720348358,
-0.00919375754892826,
0.07025028765201569,
0.16587702929973602,
-0.0168086476624012,
0.03919226676225662,
-0.014231913723051548,
-0.046651918441057205,
-0.17258740961551666,
-0.15251554548740387,
-0.07746926695108414,
-0.12711922824382782,
0.003311848733574152,
-0.10817895829677582,
0.04843924194574356,
0.04319892078638077,
0.07076722383499146,
-0.05603676661849022,
0.0959409549832344,
0.08587279170751572,
-0.11532182991504669,
0.08143848925828934,
-0.02620360255241394,
0.05048292502760887,
-0.01426080521196127,
-0.009900632314383984,
-0.09245618432760239,
0.00127472635358572,
-0.03180527314543724,
0.03988741338253021,
-0.045479029417037964,
0.02687423676252365,
-0.15346243977546692,
-0.10722704231739044,
-0.039490316063165665,
0.056313030421733856,
-0.059847455471754074,
0.100949227809906,
0.009495710022747517,
-0.018237562850117683,
0.04186875373125076,
0.20905965566635132,
-0.06365211308002472,
-0.05178653821349144,
-0.061319783329963684,
0.22792138159275055,
0.03538671135902405,
0.10324656218290329,
-0.008749047294259071,
-0.0019403878832235932,
-0.07268697023391724,
0.3617743253707886,
0.29284629225730896,
-0.07791155576705933,
0.022451438009738922,
0.0331730991601944,
0.029743103310465813,
0.12501496076583862,
0.16055123507976532,
0.08796408772468567,
0.26125219464302063,
-0.06940464675426483,
-0.03451351448893547,
-0.027550332248210907,
-0.0045446059666574,
-0.08312693238258362,
0.11572974175214767,
0.07365956157445908,
-0.06728135794401169,
-0.02372513897716999,
0.09932438284158707,
-0.2185433804988861,
0.1337432712316513,
-0.08737323433160782,
-0.14821475744247437,
-0.06273291260004044,
-0.014948664233088493,
0.07319276034832001,
0.008631710894405842,
0.08185487985610962,
-0.021191345527768135,
-0.08937317132949829,
0.048693202435970306,
0.0319901742041111,
-0.2291279435157776,
-0.016278933733701706,
0.07163753360509872,
-0.07518631219863892,
0.005438809748739004,
-0.025554493069648743,
0.03820541501045227,
0.06925502419471741,
0.0428287535905838,
-0.04184453561902046,
0.03282124921679497,
-0.005246590822935104,
-0.02624332159757614,
0.012373761273920536,
0.046713367104530334,
0.026597198098897934,
-0.1116935983300209,
0.05201485753059387,
-0.13208149373531342,
0.031130904331803322,
-0.037749629467725754,
-0.029812444001436234,
-0.0018061886075884104,
0.0018404837464913726,
-0.06120757386088371,
0.06363876163959503,
0.10851772874593735,
-0.0024609314277768135,
-0.0011416031047701836,
-0.09249333292245865,
-0.016737477853894234,
0.011317041702568531,
-0.08328749984502792,
-0.10588467866182327,
-0.10943593829870224,
-0.10668890178203583,
0.0944848507642746,
-0.019884582608938217,
-0.17893342673778534,
0.011786963790655136,
-0.10584411770105362,
0.05600238963961601,
-0.18422426283359528,
0.09811784327030182,
0.08472593873739243,
0.0197992455214262,
0.004471381660550833,
-0.014000785537064075,
0.04016757011413574,
0.07623877376317978,
-0.12001560628414154,
-0.07256489247083664
] |
null | null |
transformers
|
# wav2vec-tr-lite-AG
## Usage
The model can be used directly (without a language model) as follows:
```python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "tr", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("emre/wav2vec-tr-lite-AG")
model = Wav2Vec2ForCTC.from_pretrained("emre/wav2vec-tr-lite-AG")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
```
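The snippet above only loads the test split, processor, model, and resampler; it never runs the model. As a minimal sketch of how inference would typically proceed from this point, assuming the Common Voice `path` and `sentence` fields and a hypothetical `speech_file_to_array_fn` helper (neither is part of the original card):

```python
def speech_file_to_array_fn(batch):
    # Read each clip and resample from 48 kHz to the 16 kHz the model expects
    speech_array, sampling_rate = torchaudio.load(batch["path"])
    batch["speech"] = resampler(speech_array).squeeze().numpy()
    return batch

test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)

with torch.no_grad():
    logits = model(inputs.input_values, attention_mask=inputs.attention_mask).logits

predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
```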
**Test Result**: 27.30 %
[here](https://adresgezgini.com)
|
{"language": "tr", "license": "apache-2.0", "tags": ["audio", "automatic-speech-recognition", "speech"], "datasets": ["common_voice"], "metrics": ["wer"]}
|
automatic-speech-recognition
|
adresgezgini/wav2vec-tr-lite-AG
|
[
"transformers",
"pytorch",
"jax",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"speech",
"tr",
"dataset:common_voice",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"tr"
] |
TAGS
#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #tr #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us
|
# wav2vec-tr-lite-AG
## Usage
The model can be used directly (without a language model) as follows:
'''python
import torch
import torchaudio
from datasets import load_dataset
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor
test_dataset = load_dataset("common_voice", "tr", split="test[:2%]")
processor = Wav2Vec2Processor.from_pretrained("emre/wav2vec-tr-lite-AG")
model = Wav2Vec2ForCTC.from_pretrained("emre/wav2vec-tr-lite-AG")
resampler = torchaudio.transforms.Resample(48_000, 16_000)
Test Result: 27.30 %
here
|
[
"# wav2vec-tr-lite-AG",
"## Usage\n\nThe model can be used directly (without a language model) as follows:\n\n'''python\nimport torch\nimport torchaudio\nfrom datasets import load_dataset\nfrom transformers import Wav2Vec2ForCTC, Wav2Vec2Processor\n\ntest_dataset = load_dataset(\"common_voice\", \"tr\", split=\"test[:2%]\") \n\nprocessor = Wav2Vec2Processor.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\nmodel = Wav2Vec2ForCTC.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\n\nresampler = torchaudio.transforms.Resample(48_000, 16_000)\n\nTest Result: 27.30 %\n\n\n here"
] |
[
"TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #tr #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n",
"# wav2vec-tr-lite-AG",
"## Usage\n\nThe model can be used directly (without a language model) as follows:\n\n'''python\nimport torch\nimport torchaudio\nfrom datasets import load_dataset\nfrom transformers import Wav2Vec2ForCTC, Wav2Vec2Processor\n\ntest_dataset = load_dataset(\"common_voice\", \"tr\", split=\"test[:2%]\") \n\nprocessor = Wav2Vec2Processor.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\nmodel = Wav2Vec2ForCTC.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\n\nresampler = torchaudio.transforms.Resample(48_000, 16_000)\n\nTest Result: 27.30 %\n\n\n here"
] |
[
65,
11,
188
] |
[
"passage: TAGS\n#transformers #pytorch #jax #wav2vec2 #automatic-speech-recognition #audio #speech #tr #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n# wav2vec-tr-lite-AG## Usage\n\nThe model can be used directly (without a language model) as follows:\n\n'''python\nimport torch\nimport torchaudio\nfrom datasets import load_dataset\nfrom transformers import Wav2Vec2ForCTC, Wav2Vec2Processor\n\ntest_dataset = load_dataset(\"common_voice\", \"tr\", split=\"test[:2%]\") \n\nprocessor = Wav2Vec2Processor.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\nmodel = Wav2Vec2ForCTC.from_pretrained(\"emre/wav2vec-tr-lite-AG\")\n\nresampler = torchaudio.transforms.Resample(48_000, 16_000)\n\nTest Result: 27.30 %\n\n\n here"
] |
[
-0.09988366812467575,
0.0027330692391842604,
-0.0045263078063726425,
0.02516145072877407,
0.07110395282506943,
0.04154470935463905,
0.1684129387140274,
0.06904448568820953,
0.01287983451038599,
0.04766670987010002,
0.07668625563383102,
0.09801699966192245,
0.05415138229727745,
0.03898125886917114,
0.025982916355133057,
-0.10337299108505249,
0.020037155598402023,
-0.04484326019883156,
0.035386670380830765,
0.08870568126440048,
0.0898134708404541,
-0.06071542948484421,
0.07879415154457092,
0.07211633026599884,
-0.0532328225672245,
0.024045053869485855,
0.015329896472394466,
-0.06097463145852089,
0.07550903409719467,
0.08574830740690231,
0.07038119435310364,
0.017081499099731445,
0.06833440810441971,
-0.18346641957759857,
0.01938682794570923,
0.04974224418401718,
0.0011457778746262193,
0.040213070809841156,
0.18689100444316864,
-0.06638900935649872,
0.020778583362698555,
0.062104228883981705,
0.015840444713830948,
0.09597441554069519,
-0.029524771496653557,
-0.1472586840391159,
-0.08230312913656235,
-0.019813530147075653,
0.06354174017906189,
0.116412453353405,
-0.039617300033569336,
0.128310427069664,
-0.04329729080200195,
0.09083977341651917,
0.13634943962097168,
-0.21972014009952545,
0.020837659016251564,
0.026503179222345352,
0.12622079253196716,
-0.004741682205349207,
-0.04472921043634415,
-0.01701260544359684,
-0.01427476853132248,
0.05780890956521034,
-0.03893556445837021,
-0.05859670788049698,
-0.16755317151546478,
0.006461138837039471,
-0.12231411039829254,
-0.029849480837583542,
0.18519450724124908,
0.017086990177631378,
-0.10029483586549759,
-0.06552652269601822,
-0.05929616093635559,
-0.09350801259279251,
0.051201172173023224,
0.01397513784468174,
-0.0220143124461174,
0.03728451579809189,
-0.004308826755732298,
-0.015247831121087074,
-0.10609887540340424,
-0.16384248435497284,
-0.13147245347499847,
0.03602185472846031,
0.022041872143745422,
0.05381130799651146,
-0.07196301966905594,
0.08336492627859116,
0.014281182549893856,
-0.07571553438901901,
-0.0707835927605629,
-0.022797102108597755,
-0.15409646928310394,
0.014584685675799847,
-0.09874829649925232,
-0.18060165643692017,
0.07369086891412735,
0.1556367427110672,
0.07526248693466187,
0.06621097028255463,
-0.06599626690149307,
0.029161762446165085,
-0.01013657171279192,
0.14284516870975494,
-0.07538184523582458,
-0.018249815329909325,
-0.032374147325754166,
-0.05545305833220482,
-0.02394704706966877,
0.00814860314130783,
-0.028552381321787834,
-0.10086366534233093,
-0.014484007842838764,
0.02875186689198017,
0.01290008146315813,
-0.00012157389573985711,
-0.0887737050652504,
-0.0370117723941803,
0.029893871396780014,
-0.1049090251326561,
-0.011157725006341934,
0.08614581823348999,
0.01550570223480463,
0.18430303037166595,
0.12327344715595245,
0.0186148714274168,
-0.05537550523877144,
-0.03840706869959831,
-0.017972109839320183,
0.006212677340954542,
-0.007294666953384876,
-0.07897043973207474,
0.029416291043162346,
-0.005583927035331726,
0.004606244154274464,
-0.1329287886619568,
-0.1725497990846634,
-0.03676619008183479,
0.05332091078162193,
-0.003492051735520363,
0.0929584801197052,
0.00412644213065505,
0.023607172071933746,
0.009664495475590229,
-0.007281668018549681,
0.0643703043460846,
-0.040394484996795654,
0.025478459894657135,
-0.010360226035118103,
0.043532900512218475,
0.02676018700003624,
0.055008672177791595,
-0.04613102227449417,
0.025613052770495415,
0.0005676263244822621,
0.10278403013944626,
-0.07139961421489716,
0.00863464456051588,
-0.20065294206142426,
-0.04302990809082985,
-0.07917358726263046,
-0.00493607297539711,
0.08113072067499161,
0.11364741623401642,
-0.24431468546390533,
-0.007785773370414972,
0.16212041676044464,
-0.09470245242118835,
-0.08233160525560379,
0.11262542754411697,
0.015773791819810867,
0.007296179421246052,
0.049801796674728394,
0.11139605939388275,
0.11692492663860321,
-0.22698737680912018,
0.009998632594943047,
0.025555599480867386,
-0.01836349628865719,
0.010378233157098293,
0.15620702505111694,
-0.12308462709188461,
-0.04995814338326454,
-0.011308010667562485,
0.006248353980481625,
0.047867923974990845,
-0.01545913890004158,
-0.047085005789995193,
-0.047639936208724976,
-0.0640207901597023,
0.055548012256622314,
-0.07917334139347076,
-0.0407584048807621,
0.005616752430796623,
-0.09216878563165665,
-0.014765302650630474,
0.13564510643482208,
-0.06204572319984436,
0.030779743567109108,
-0.10680985450744629,
0.06779973208904266,
-0.11291751265525818,
-0.0007993577164597809,
-0.1345605105161667,
0.04975145310163498,
0.0005840304656885564,
-0.07494061440229416,
0.04634009674191475,
0.07636402547359467,
0.059187617152929306,
0.014675023034214973,
0.07927711308002472,
-0.04580001160502434,
0.017269400879740715,
-0.007349798455834389,
-0.046476081013679504,
-0.0913827195763588,
-0.03818275034427643,
-0.026224588975310326,
0.1087576299905777,
-0.09566691517829895,
-0.028221845626831055,
0.042223405092954636,
0.029440196231007576,
-0.008539995178580284,
-0.03048601746559143,
0.02323855832219124,
-0.026522869244217873,
-0.05571534484624863,
-0.015711454674601555,
-0.044923510402441025,
-0.0062611592002213,
-0.0604773685336113,
0.09012552350759506,
-0.11316753923892975,
0.0033194597344845533,
0.12478945404291153,
0.01935550384223461,
-0.0298269372433424,
-0.0036627098452299833,
-0.002268314128741622,
-0.02007399871945381,
-0.06839704513549805,
-0.05032484233379364,
0.15024496614933014,
0.07404642552137375,
0.08684299886226654,
-0.0548258051276207,
-0.002386123174801469,
0.044622331857681274,
-0.05167055130004883,
0.007250509690493345,
0.0685308501124382,
-0.014326670207083225,
0.01813686639070511,
-0.025950556620955467,
0.14060181379318237,
-0.10426498204469681,
0.11404404044151306,
0.010042181238532066,
-0.11499354243278503,
-0.05820165202021599,
0.0058228191919624805,
0.00757153145968914,
-0.001453625620342791,
-0.08651769161224365,
0.09931355714797974,
0.08690642565488815,
0.06084974482655525,
0.010476714000105858,
-0.07970531284809113,
0.029036566615104675,
0.060577258467674255,
-0.08231640607118607,
-0.09743257611989975,
0.08667826652526855,
-0.02532198280096054,
-0.006229126825928688,
-0.05088236555457115,
0.024912940338253975,
0.012353361584246159,
-0.03910230100154877,
-0.13957051932811737,
0.1540605127811432,
-0.13031066954135895,
-0.08418050408363342,
-0.23685072362422943,
-0.06735334545373917,
-0.04809794947504997,
0.024200350046157837,
0.10315307229757309,
-0.08122892677783966,
-0.08322042971849442,
-0.055662985891103745,
0.09287242591381073,
-0.008457672782242298,
0.017049966380000114,
0.02059486322104931,
-0.002263001399114728,
0.032709065824747086,
-0.11735828965902328,
0.023445360362529755,
0.03862713649868965,
-0.06846893578767776,
0.011120296083390713,
0.016304785385727882,
0.03225058689713478,
0.09055861085653305,
0.04556751996278763,
0.006819841917604208,
0.033680450171232224,
0.23671843111515045,
-0.03690888360142708,
0.010522504337131977,
0.2499963343143463,
-0.07244005054235458,
0.0051409355364739895,
0.03094547614455223,
0.010462918318808079,
-0.03149409592151642,
0.0058453879319131374,
-0.022266486659646034,
-0.0367218516767025,
-0.35583722591400146,
-0.06349003314971924,
-0.04033741354942322,
-0.05364612489938736,
0.05127646401524544,
0.012034210376441479,
0.040992993861436844,
0.1216990277171135,
0.0034987954422831535,
0.025212544947862625,
0.01423566322773695,
0.02451186068356037,
0.12201628088951111,
0.0030863957945257425,
0.06807222217321396,
-0.07954854518175125,
0.020368915051221848,
0.06808918714523315,
0.07386744767427444,
0.11701659113168716,
0.08164200931787491,
0.13980244100093842,
0.08625520765781403,
0.0985579639673233,
0.019658619537949562,
0.14762699604034424,
-0.004743928089737892,
0.027599802240729332,
0.03761665150523186,
-0.0931955948472023,
-0.032998181879520416,
0.054074399173259735,
0.06698578596115112,
-0.022260867059230804,
-0.012714038603007793,
0.02677551656961441,
0.03004492074251175,
0.1372743546962738,
0.04668152704834938,
-0.30227962136268616,
-0.049034684896469116,
-0.010971149429678917,
-0.023561544716358185,
-0.07235763221979141,
0.011655513197183609,
0.049851808696985245,
-0.13014407455921173,
0.07970604300498962,
0.002757720183581114,
0.07720443606376648,
0.00790441408753395,
0.004123439081013203,
0.01139862835407257,
0.04984083026647568,
0.012035603635013103,
0.08515999466180801,
-0.18403126299381256,
0.11179149895906448,
0.03574717417359352,
0.02610139362514019,
-0.07103034108877182,
0.056778013706207275,
-0.007985579781234264,
-0.008866099640727043,
0.13216033577919006,
-0.013658267445862293,
0.02839856967329979,
-0.027694640681147575,
-0.06464066356420517,
0.03679906949400902,
0.06099071353673935,
0.030852003023028374,
0.048248179256916046,
-0.06608166545629501,
-0.021610472351312637,
-0.008571688085794449,
0.05901491641998291,
-0.15576279163360596,
-0.084467314183712,
0.03640391677618027,
0.10098093003034592,
0.14476706087589264,
-0.03861859068274498,
-0.029559222981333733,
-0.11458541452884674,
0.06948982179164886,
-0.25329557061195374,
-0.022651394829154015,
-0.10700931400060654,
-0.09351404756307602,
0.22371207177639008,
-0.0724407434463501,
0.06991014629602432,
-0.020966393873095512,
0.09602850675582886,
-0.014652957208454609,
-0.11560599505901337,
0.06391025334596634,
-0.10592413693666458,
-0.11031896620988846,
-0.05307462811470032,
0.07396220415830612,
0.012953788042068481,
0.032209329307079315,
0.06358515471220016,
0.006843993440270424,
-0.11775481700897217,
-0.07180305570363998,
-0.028454696759581566,
0.12246770411729813,
-0.09616955369710922,
0.06669488549232483,
-0.005997232161462307,
-0.12340005487203598,
-0.008143562823534012,
-0.05805770680308342,
0.12689319252967834,
0.08914656937122345,
-0.046933989971876144,
0.10624901950359344,
0.11389397829771042,
-0.06315495073795319,
-0.1693759262561798,
-0.025453509762883186,
0.09097060561180115,
0.04165700450539589,
0.0911262109875679,
-0.12257254868745804,
0.13113811612129211,
0.06761474162340164,
-0.01624443754553795,
0.03859110549092293,
-0.2262418419122696,
-0.1277121603488922,
0.14191198348999023,
-0.0303946603089571,
-0.014418086037039757,
-0.08836522698402405,
-0.07774507254362106,
-0.12206569314002991,
-0.16655004024505615,
0.014546637423336506,
-0.1585836559534073,
0.09282524138689041,
0.016376737505197525,
0.0817418172955513,
0.01586686447262764,
-0.0666048601269722,
0.11992806941270828,
0.08953037858009338,
-0.016659650951623917,
-0.01713295839726925,
0.16162917017936707,
0.06377993524074554,
-0.009856014512479305,
0.1246887668967247,
-0.07411250472068787,
0.06873966753482819,
-0.10338376462459564,
-0.042282834649086,
-0.019163571298122406,
0.053377699106931686,
0.003547374624758959,
-0.014249255880713463,
0.028155192732810974,
-0.08994557708501816,
0.04630466550588608,
-0.0320390909910202,
-0.019858084619045258,
-0.02235594391822815,
-0.020842397585511208,
0.17856687307357788,
0.08262664824724197,
0.08622296154499054,
-0.17579004168510437,
-0.003988195676356554,
-0.02958051674067974,
0.060427796095609665,
-0.07603142410516739,
0.08538385480642319,
0.07558996975421906,
-0.011466504074633121,
0.07945071160793304,
0.038188353180885315,
-0.0927649512887001,
0.03424185514450073,
0.04643069580197334,
-0.05994164198637009,
-0.02314801886677742,
0.0015197362517938018,
0.06703067570924759,
-0.07125408947467804,
-0.003046770580112934,
0.19371865689754486,
0.019352717325091362,
0.00018339518283028156,
0.009210750460624695,
0.019828375428915024,
-0.15844464302062988,
0.23351703584194183,
0.008665105327963829,
0.05143176019191742,
-0.08533481508493423,
0.04544391110539436,
0.02354547381401062,
-0.03491789475083351,
0.03898429498076439,
-0.004125692415982485,
-0.06433893740177155,
-0.09803608059883118,
-0.0005253757117316127,
0.05431198328733444,
-0.1388780027627945,
-0.05968797206878662,
0.009251647628843784,
-0.06988843530416489,
0.002081018639728427,
0.07155422866344452,
0.04416150227189064,
0.06493982672691345,
-0.02678287960588932,
-0.07637389749288559,
-0.02203197591006756,
0.0764232873916626,
0.05716660991311073,
0.03584052622318268,
-0.11562711745500565,
0.07014011591672897,
-0.013078865595161915,
0.06462275236845016,
-0.015821799635887146,
-0.01477740053087473,
-0.057117901742458344,
0.04889224097132683,
-0.21268562972545624,
0.0331110842525959,
-0.040825098752975464,
-0.0019722236320376396,
0.04332137480378151,
-0.025555333122611046,
0.00825792457908392,
0.046644099056720734,
-0.05848318710923195,
-0.00779078621417284,
-0.0586770698428154,
0.07992392033338547,
-0.14069895446300507,
0.009464364498853683,
0.04430758208036423,
-0.07494693994522095,
0.07680249959230423,
0.11866734176874161,
-0.049678511917591095,
0.00972576905041933,
-0.18610769510269165,
-0.031424157321453094,
0.005758264102041721,
0.05457838997244835,
-0.016327906399965286,
-0.10078631341457367,
0.02428520657122135,
0.035719133913517,
0.020157573744654655,
-0.03222113102674484,
0.12471430748701096,
-0.08569356799125671,
-0.016997812315821648,
-0.1001238152384758,
0.023976802825927734,
-0.061275143176317215,
-0.018641963601112366,
0.04178732633590698,
0.09788420796394348,
0.16588623821735382,
-0.033289212733507156,
0.0970602035522461,
-0.1154782623052597,
0.01402649749070406,
-0.032611947506666183,
0.014081303961575031,
-0.08555252850055695,
-0.07101518660783768,
0.044242702424526215,
-0.03796294704079628,
0.1445973515510559,
-0.02667590230703354,
0.09739140421152115,
-0.008998897857964039,
0.058977629989385605,
-0.028464438393712044,
-0.005605386104434729,
0.1920715719461441,
0.04662739112973213,
0.04625537618994713,
-0.09011051058769226,
-0.04591791704297066,
0.056737761944532394,
-0.004783333744853735,
-0.047575436532497406,
0.15865272283554077,
-0.07710318267345428,
0.08823579549789429,
0.07973916828632355,
-0.07014752924442291,
-0.0769132599234581,
-0.07783007621765137,
-0.08047129958868027,
0.05894196033477783,
-0.02377919852733612,
0.09897902607917786,
0.05284063145518303,
-0.10285786539316177,
0.011359479278326035,
0.03343162313103676,
-0.03771796077489853,
-0.13391292095184326,
-0.09972809255123138,
-0.08246926963329315,
-0.1427319347858429,
0.016693992540240288,
-0.04138926789164543,
0.09248604625463486,
0.04415680840611458,
0.04626505449414253,
0.03069228120148182,
0.15881213545799255,
-0.04975620284676552,
-0.09927000850439072,
-0.04751788079738617,
-0.03687706217169762,
-0.03047114610671997,
0.08337625116109848,
-0.011948795057833195,
0.12405229359865189,
-0.008528760634362698,
0.05217203125357628,
0.03860532492399216,
-0.025682298466563225,
0.07511889189481735,
-0.0582563653588295,
-0.07718569785356522,
-0.04127109423279762,
0.011852968484163284,
-0.05856240168213844,
0.12133210152387619,
0.07977557927370071,
0.01736070215702057,
0.006830666679888964,
0.047139495611190796,
-0.07527052611112595,
-0.12994034588336945,
-0.14137117564678192,
0.044117484241724014,
0.030594319105148315,
0.07872508466243744,
-0.02398640289902687,
-0.06437460333108902,
-0.006863793823868036,
0.29947879910469055,
0.12466105818748474,
-0.020695067942142487,
-0.013592804782092571,
0.06377697736024857,
0.006363379769027233,
-0.0038643795996904373,
0.032458797097206116,
0.062190789729356766,
0.17634814977645874,
-0.02435539849102497,
0.07564925402402878,
-0.02114071510732174,
-0.058168452233076096,
-0.0018000132404267788,
-0.007992836646735668,
-0.13687852025032043,
-0.08623888343572617,
0.07809923589229584,
0.11886270344257355,
-0.12666863203048706,
-0.09307707101106644,
-0.00445086183026433,
0.02395702712237835,
-0.056820888072252274,
-0.045550692826509476,
0.12336434423923492,
0.058622922748327255,
0.012094172649085522,
-0.080103799700737,
-0.059826210141181946,
0.21358585357666016,
-0.020821349695324898,
-0.11489319801330566,
-0.0643538385629654,
0.03145711123943329,
-0.05281759798526764,
0.04615988954901695,
0.02038203366100788,
0.1447724848985672,
0.048211995512247086,
0.1046781912446022,
-0.1069687232375145,
0.10578763484954834,
0.014313703402876854,
-0.14481110870838165,
0.054032836109399796,
0.06333357095718384,
-0.0332203172147274,
0.08159144967794418,
-0.02475379779934883,
-0.06938783079385757,
0.0451519712805748,
0.07656355947256088,
0.03344360738992691,
-0.06910376995801926,
-0.009663024917244911,
-0.07425983995199203,
0.13205446302890778,
0.08053252100944519,
-0.06714975088834763,
-0.010077928192913532,
-0.04530875384807587,
0.06799925863742828,
0.043848916888237,
-0.0357900969684124,
-0.012942972593009472,
-0.17270293831825256,
0.02273293398320675,
-0.04814454913139343,
0.03722721338272095,
-0.20411255955696106,
-0.03701118379831314,
-0.03287208825349808,
-0.06399346143007278,
-0.0069966730661690235,
0.06967420876026154,
0.09871608763933182,
0.01551645528525114,
-0.010435283184051514,
0.003985109739005566,
0.033495984971523285,
0.0789346992969513,
-0.144444540143013,
-0.1271091252565384
] |
null | null |
transformers
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 19333717
- CO2 Emissions (in grams): 88.89388195672073
## Validation Metrics
- Loss: 1.0499154329299927
- Accuracy: 0.6207088513638894
- Macro F1: 0.46250803661544765
- Micro F1: 0.6207088513638894
- Weighted F1: 0.5850362079928957
- Macro Precision: 0.6451479987704787
- Micro Precision: 0.6207088513638894
- Weighted Precision: 0.6285080101186085
- Macro Recall: 0.4405680478429344
- Micro Recall: 0.6207088513638894
- Weighted Recall: 0.6207088513638894
## Usage
You can use cURL to access this model:
```
$ curl -X POST -H "Authorization: Bearer YOUR_API_KEY" -H "Content-Type: application/json" -d '{"inputs": "I love AutoNLP"}' https://api-inference.huggingface.co/models/adrianmoses/autonlp-auto-nlp-lyrics-classification-19333717
```
Or Python API:
```
from transformers import AutoModelForSequenceClassification, AutoTokenizer
model = AutoModelForSequenceClassification.from_pretrained("adrianmoses/autonlp-auto-nlp-lyrics-classification-19333717", use_auth_token=True)
tokenizer = AutoTokenizer.from_pretrained("adrianmoses/autonlp-auto-nlp-lyrics-classification-19333717", use_auth_token=True)
inputs = tokenizer("I love AutoNLP", return_tensors="pt")
outputs = model(**inputs)
```
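The card stops at the raw forward pass. A short, hedged follow-up showing how a predicted label could be read from `outputs` is sketched below; the `id2label` mapping is assumed to be populated in the model config by AutoNLP, which the original card does not state.

```
import torch

# Pick the highest-scoring class and map it back to its label name
predicted_class_id = int(torch.argmax(outputs.logits, dim=-1).item())
print(model.config.id2label[predicted_class_id])
```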
|
{"language": "en", "tags": "autonlp", "datasets": ["adrianmoses/autonlp-data-auto-nlp-lyrics-classification"], "widget": [{"text": "I love AutoNLP \ud83e\udd17"}], "co2_eq_emissions": 88.89388195672073}
|
text-classification
|
adrianmoses/autonlp-auto-nlp-lyrics-classification-19333717
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autonlp",
"en",
"dataset:adrianmoses/autonlp-data-auto-nlp-lyrics-classification",
"co2_eq_emissions",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #autonlp #en #dataset-adrianmoses/autonlp-data-auto-nlp-lyrics-classification #co2_eq_emissions #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# Model Trained Using AutoNLP
- Problem type: Multi-class Classification
- Model ID: 19333717
- CO2 Emissions (in grams): 88.89388195672073
## Validation Metrics
- Loss: 1.0499154329299927
- Accuracy: 0.6207088513638894
- Macro F1: 0.46250803661544765
- Micro F1: 0.6207088513638894
- Weighted F1: 0.5850362079928957
- Macro Precision: 0.6451479987704787
- Micro Precision: 0.6207088513638894
- Weighted Precision: 0.6285080101186085
- Macro Recall: 0.4405680478429344
- Micro Recall: 0.6207088513638894
- Weighted Recall: 0.6207088513638894
## Usage
You can use cURL to access this model:
Or Python API:
|
[
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 19333717\n- CO2 Emissions (in grams): 88.89388195672073",
"## Validation Metrics\n\n- Loss: 1.0499154329299927\n- Accuracy: 0.6207088513638894\n- Macro F1: 0.46250803661544765\n- Micro F1: 0.6207088513638894\n- Weighted F1: 0.5850362079928957\n- Macro Precision: 0.6451479987704787\n- Micro Precision: 0.6207088513638894\n- Weighted Precision: 0.6285080101186085\n- Macro Recall: 0.4405680478429344\n- Micro Recall: 0.6207088513638894\n- Weighted Recall: 0.6207088513638894",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-adrianmoses/autonlp-data-auto-nlp-lyrics-classification #co2_eq_emissions #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 19333717\n- CO2 Emissions (in grams): 88.89388195672073",
"## Validation Metrics\n\n- Loss: 1.0499154329299927\n- Accuracy: 0.6207088513638894\n- Macro F1: 0.46250803661544765\n- Micro F1: 0.6207088513638894\n- Weighted F1: 0.5850362079928957\n- Macro Precision: 0.6451479987704787\n- Micro Precision: 0.6207088513638894\n- Weighted Precision: 0.6285080101186085\n- Macro Recall: 0.4405680478429344\n- Micro Recall: 0.6207088513638894\n- Weighted Recall: 0.6207088513638894",
"## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
81,
42,
152,
17
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autonlp #en #dataset-adrianmoses/autonlp-data-auto-nlp-lyrics-classification #co2_eq_emissions #autotrain_compatible #endpoints_compatible #has_space #region-us \n# Model Trained Using AutoNLP\n\n- Problem type: Multi-class Classification\n- Model ID: 19333717\n- CO2 Emissions (in grams): 88.89388195672073## Validation Metrics\n\n- Loss: 1.0499154329299927\n- Accuracy: 0.6207088513638894\n- Macro F1: 0.46250803661544765\n- Micro F1: 0.6207088513638894\n- Weighted F1: 0.5850362079928957\n- Macro Precision: 0.6451479987704787\n- Micro Precision: 0.6207088513638894\n- Weighted Precision: 0.6285080101186085\n- Macro Recall: 0.4405680478429344\n- Micro Recall: 0.6207088513638894\n- Weighted Recall: 0.6207088513638894## Usage\n\nYou can use cURL to access this model:\n\n\n\nOr Python API:"
] |
[
-0.11093606799840927,
0.22796538472175598,
-0.003789980197325349,
0.10480526089668274,
0.08866030722856522,
0.04518298804759979,
0.05572713166475296,
0.1572093367576599,
0.00984315574169159,
0.1626807600259781,
0.11249367892742157,
0.17180021107196808,
0.05195826664566994,
0.14084480702877045,
-0.10405141115188599,
-0.1368662416934967,
0.019156914204359055,
0.013296116143465042,
0.0713840201497078,
0.08910080045461655,
0.06932636350393295,
-0.0649515762925148,
0.11034590750932693,
-0.034052588045597076,
-0.08253730833530426,
0.03251401707530022,
0.06271900236606598,
-0.058405447751283646,
0.030448827892541885,
0.09041720628738403,
0.1275189369916916,
-0.020254313945770264,
0.07136350125074387,
-0.12623971700668335,
-0.02531132660806179,
0.03300412744283676,
-0.035219430923461914,
0.08031971007585526,
0.12863102555274963,
-0.02553911693394184,
0.08229304105043411,
-0.10793382674455643,
0.07507792860269547,
0.06674802303314209,
-0.09712658822536469,
-0.10252805799245834,
-0.11969323456287384,
0.03672998398542404,
0.07207592576742172,
0.08582823723554611,
0.003145672380924225,
0.21777978539466858,
-0.02298683673143387,
0.08515124768018723,
0.04614196717739105,
-0.2153862565755844,
-0.04942198842763901,
0.1968632936477661,
-0.01735060103237629,
0.012309911660850048,
-0.032577864825725555,
-0.011139379814267159,
0.06294816732406616,
0.01967710629105568,
0.025653749704360962,
-0.03905526548624039,
0.0009938461007550359,
-0.013490299694240093,
-0.11433485895395279,
-0.09566153585910797,
0.14788693189620972,
0.04976177215576172,
-0.056360263377428055,
-0.10235203057527542,
-0.08858383446931839,
-0.15766939520835876,
-0.02375578135251999,
-0.030868306756019592,
0.008946748450398445,
-0.03985815495252609,
-0.02022477425634861,
0.08867168426513672,
-0.049759507179260254,
-0.06597618758678436,
-0.13895976543426514,
0.03773030266165733,
-0.005200693849474192,
0.03423485532402992,
0.023646997287869453,
0.014981709420681,
-0.10499361902475357,
-0.0700991153717041,
-0.02677226811647415,
0.013901806436479092,
-0.051110170781612396,
-0.06422871351242065,
0.018649987876415253,
0.09553360939025879,
0.048819027841091156,
0.12524910271167755,
-0.01841725967824459,
0.08530664443969727,
0.053438033908605576,
-0.011671748012304306,
-0.0343276746571064,
0.1082409992814064,
-0.08889377862215042,
-0.10024257004261017,
0.004588479176163673,
0.003393631661310792,
0.01199971605092287,
-0.04824274033308029,
-0.031100880354642868,
-0.09315801411867142,
0.02465515211224556,
0.050196342170238495,
0.0680805966258049,
0.0015568494563922286,
-0.08632395416498184,
-0.054033905267715454,
0.11422427743673325,
-0.09128569066524506,
0.05185772106051445,
0.0026902579702436924,
-0.11392122507095337,
0.08551307767629623,
0.08899889141321182,
0.0023129363544285297,
-0.09184166043996811,
0.025886187329888344,
-0.10492019355297089,
0.021856078878045082,
-0.06288935244083405,
-0.11384185403585434,
0.03811236843466759,
0.028767215088009834,
-0.01917266845703125,
-0.13447673618793488,
-0.1319112628698349,
-0.07433445006608963,
-0.002086812164634466,
-0.08084888756275177,
-0.05762732774019241,
-0.01417882926762104,
-0.06598182767629623,
0.04077017307281494,
0.015161460265517235,
-0.01774412952363491,
-0.04139475151896477,
0.04562343657016754,
0.04689957946538925,
0.06422553211450577,
-0.06247179955244064,
0.03615201264619827,
-0.03865669667720795,
0.01785002090036869,
-0.12899893522262573,
0.0949234813451767,
-0.0931139662861824,
0.038076236844062805,
-0.1657334864139557,
-0.055196817964315414,
0.06675156205892563,
-0.04453808814287186,
0.07109501212835312,
0.11073317378759384,
-0.1676223874092102,
0.013767804019153118,
0.07825881987810135,
-0.05819786712527275,
-0.10237893462181091,
0.06781335920095444,
0.006055932957679033,
0.04707575961947441,
0.005621735006570816,
0.13129685819149017,
0.1932389885187149,
-0.11472225934267044,
-0.09785111248493195,
-0.01176683884114027,
0.038353364914655685,
-0.042047422379255295,
0.0976894423365593,
-0.04637816920876503,
-0.1211337000131607,
-0.0019423417979851365,
0.019703328609466553,
-0.03727035969495773,
-0.03219267353415489,
-0.09020103514194489,
-0.0496661439538002,
-0.02951633185148239,
0.03614053875207901,
-0.010196836665272713,
-0.00771211925894022,
-0.011867169290781021,
-0.05179475620388985,
0.05683800205588341,
0.15996277332305908,
-0.04072602838277817,
-0.022010628134012222,
-0.11944878101348877,
0.08218789845705032,
-0.1355886459350586,
-0.058006998151540756,
-0.2116478532552719,
-0.02360103651881218,
0.011769359931349754,
-0.089456707239151,
0.03204943612217903,
-0.015055843628942966,
0.09457244724035263,
0.03740324452519417,
0.043765030801296234,
0.019913924857974052,
0.07454659044742584,
-0.00671463692560792,
-0.1191951185464859,
-0.06037745252251625,
-0.031477417796850204,
-0.004398427903652191,
0.22654099762439728,
-0.19205217063426971,
0.003719959408044815,
0.04783101752400398,
0.03790020942687988,
0.0020987619645893574,
-0.055961500853300095,
-0.031329620629549026,
0.06830502301454544,
0.0004121499659959227,
-0.04499881714582443,
0.043118223547935486,
-0.027900254353880882,
-0.07423904538154602,
-0.02326122671365738,
-0.23906229436397552,
0.18356125056743622,
0.13106966018676758,
0.08660019189119339,
-0.09717534482479095,
-0.06260018050670624,
0.036332376301288605,
-0.04392402619123459,
-0.003606460290029645,
0.021045712754130363,
0.08919667452573776,
0.020952623337507248,
0.07787816971540451,
-0.0650237500667572,
0.004253358114510775,
0.00595771474763751,
-0.03483642265200615,
-0.03278694674372673,
0.1970146745443344,
0.024472424760460854,
-0.05188834294676781,
0.06373509764671326,
0.04702374339103699,
-0.09307427704334259,
0.029353827238082886,
0.03705218806862831,
-0.059163253754377365,
-0.09588691592216492,
0.007541255559772253,
0.06150700896978378,
0.03173165023326874,
-0.011736289598047733,
0.1109175980091095,
0.06736025959253311,
-0.0007818715530447662,
0.027353299781680107,
-0.08407121151685715,
0.008242839947342873,
0.055489327758550644,
-0.055207423865795135,
-0.0895053818821907,
0.008762943558394909,
0.026723867282271385,
0.11509145051240921,
-0.006310747936367989,
-0.020966092124581337,
-0.010086862370371819,
0.0011245710775256157,
-0.11417236924171448,
0.19848977029323578,
-0.10404501110315323,
-0.12803155183792114,
-0.17710191011428833,
-0.12094144523143768,
-0.08738642185926437,
-0.06228321045637131,
0.01602203957736492,
-0.024939972907304764,
-0.10831993818283081,
-0.08468357473611832,
-0.07126965373754501,
-0.01699954643845558,
-0.04845940321683884,
-0.0010240197880193591,
0.004583859350532293,
0.07501033693552017,
-0.1194324642419815,
-0.03342337906360626,
-0.0016618933295831084,
-0.0977320671081543,
0.0722578689455986,
-0.005679581314325333,
0.11255670338869095,
0.16013148427009583,
-0.050170980393886566,
0.012555282562971115,
0.023699063807725906,
0.21669062972068787,
0.010301725007593632,
-0.011660228483378887,
0.19232316315174103,
0.07693316787481308,
0.08156599849462509,
0.13204646110534668,
0.06318143755197525,
-0.06514210999011993,
-0.02111418917775154,
0.05199899524450302,
-0.0166394654661417,
-0.20899564027786255,
-0.1860240250825882,
-0.008667734451591969,
-0.001386808231472969,
0.15922005474567413,
0.03775026649236679,
0.06385572254657745,
0.10569217801094055,
0.037268392741680145,
0.0313095785677433,
-0.06443528085947037,
0.07541771978139877,
0.09515026956796646,
0.03179405629634857,
0.1247347742319107,
-0.07235147058963776,
0.046218570321798325,
0.10946813970804214,
-0.009640495292842388,
0.0834427997469902,
0.05009132996201515,
0.07507724314928055,
-0.006345786154270172,
0.14013902842998505,
0.023983117192983627,
0.07528238743543625,
0.060321513563394547,
-0.0077904765494167805,
0.04245397821068764,
-0.11336638033390045,
-0.1047758087515831,
0.009810363873839378,
0.059254877269268036,
0.06824537366628647,
-0.08203724026679993,
-0.0029362773057073355,
0.029309401288628578,
0.040908921509981155,
0.0873415544629097,
-0.44680991768836975,
-0.042666640132665634,
0.01826339028775692,
-0.01473239902406931,
-0.0962628722190857,
-0.02450631372630596,
-0.023970870301127434,
-0.11467138677835464,
0.05216899886727333,
0.008946772664785385,
0.1124328151345253,
-0.07513273507356644,
-0.044831402599811554,
-0.07310684770345688,
0.04204048588871956,
-0.006417840253561735,
0.055839117616415024,
-0.1907309740781784,
0.1701974868774414,
0.03714117780327797,
0.04646177589893341,
-0.05757136642932892,
0.02069295570254326,
0.03633615002036095,
0.007892568595707417,
0.15785840153694153,
0.01872740127146244,
-0.08458021283149719,
-0.2734312415122986,
-0.15702751278877258,
0.02567502111196518,
-0.035392627120018005,
-0.027460793033242226,
0.08252810686826706,
-0.01493997685611248,
-0.026937715709209442,
-0.035403817892074585,
-0.047187384217977524,
-0.07055126130580902,
-0.05229324474930763,
0.024576691910624504,
0.09165018796920776,
-0.06786952167749405,
-0.025079580023884773,
-0.019993949681520462,
-0.03572949767112732,
0.13069216907024384,
-0.08292599022388458,
-0.04377639666199684,
-0.12681855261325836,
-0.04318681359291077,
0.1623951643705368,
-0.1160275787115097,
0.0367182195186615,
-0.013515728525817394,
0.057220861315727234,
-0.000004547454409475904,
-0.09732712060213089,
0.06915569305419922,
-0.034769847989082336,
-0.014857122674584389,
0.028298262506723404,
0.03503824770450592,
-0.005966776050627232,
0.06826119869947433,
0.04537985846400261,
0.045710381120443344,
-0.06053074449300766,
-0.1374705731868744,
-0.030106017366051674,
0.045297544449567795,
0.11283694952726364,
0.07078830152750015,
0.014722289517521858,
-0.16213057935237885,
-0.05997903645038605,
0.0862044170498848,
0.09828127175569534,
0.3040563464164734,
-0.0794055238366127,
-0.011097569949924946,
0.09526118636131287,
-0.03846156224608421,
-0.19649431109428406,
-0.03053622879087925,
0.018097201362252235,
0.018419794738292694,
-0.019998524338006973,
-0.04552963376045227,
0.1237654983997345,
0.18548008799552917,
-0.01895032823085785,
-0.055654674768447876,
-0.30055394768714905,
-0.13921533524990082,
0.14270225167274475,
0.11497059464454651,
0.003483671462163329,
-0.14856848120689392,
-0.0604126900434494,
-0.11298725008964539,
-0.1649169772863388,
0.1342010498046875,
-0.027582606300711632,
0.06742295622825623,
-0.03522995486855507,
0.08996424078941345,
0.046957992017269135,
-0.04962603375315666,
0.24607542157173157,
-0.008207621984183788,
0.006759779527783394,
-0.04170910641551018,
-0.0880490243434906,
-0.03998352587223053,
-0.11528473347425461,
0.12850479781627655,
0.038515470921993256,
0.07323230803012848,
-0.2523840367794037,
0.01688646711409092,
-0.011717076413333416,
0.048813220113515854,
-0.0444674976170063,
-0.020258110016584396,
-0.01582014188170433,
0.03267669305205345,
-0.0011944728903472424,
-0.028329573571681976,
-0.029896125197410583,
-0.06752540916204453,
0.0952305942773819,
0.1866045743227005,
0.08881291002035141,
-0.03880872577428818,
-0.0882372334599495,
0.05178487300872803,
-0.0559660829603672,
0.04655731841921806,
-0.1745133101940155,
0.053439993411302567,
0.12719783186912537,
0.05553647503256798,
0.052190519869327545,
0.02159939892590046,
-0.03427477180957794,
-0.026569237932562828,
0.05038180202245712,
-0.10681649297475815,
0.025486892089247704,
0.024610420688986778,
0.03285173699259758,
-0.11232500523328781,
-0.07291988283395767,
0.1066347286105156,
-0.010388687252998352,
-0.02240663580596447,
0.039644304662942886,
-0.005750319920480251,
-0.0538829043507576,
0.2330417037010193,
0.02222147211432457,
0.08484670519828796,
-0.11014147102832794,
0.05175449699163437,
0.10557376593351364,
-0.1214478611946106,
-0.0027990820817649364,
0.07081212848424911,
-0.05495854839682579,
-0.060497406870126724,
-0.024474821984767914,
0.11547810584306717,
-0.1160278469324112,
-0.05036105588078499,
0.027147866785526276,
-0.037418730556964874,
0.05463389679789543,
0.1764930635690689,
0.11394906789064407,
0.014074290171265602,
-0.013370107859373093,
-0.09145570546388626,
-0.12330331653356552,
0.025134261697530746,
0.10385171324014664,
0.024757470935583115,
-0.10689006745815277,
0.1321435421705246,
-0.02640301175415516,
0.020122772082686424,
-0.0008396581397391856,
0.0011680172756314278,
-0.17145341634750366,
-0.03264099359512329,
0.014497856609523296,
0.0941946804523468,
-0.08001335710287094,
0.061348140239715576,
0.0022041848860681057,
0.016893377527594566,
-0.07203888148069382,
0.00169112638104707,
-0.06728978455066681,
-0.06559682637453079,
0.012296373955905437,
0.048431798815727234,
-0.11584807932376862,
-0.03260737657546997,
0.07308181375265121,
-0.04396989941596985,
0.03550969064235687,
0.10260963439941406,
0.05581757053732872,
-0.017390765249729156,
-0.050599802285432816,
0.00024734847829677165,
0.08228858560323715,
0.024082982912659645,
0.08460965007543564,
-0.1924763321876526,
0.05307350680232048,
-0.0073864818550646305,
0.019284216687083244,
0.06343240290880203,
0.10006897896528244,
-0.12493149936199188,
0.0014142269501462579,
-0.11826848983764648,
-0.07745476812124252,
-0.09543585777282715,
0.06294772773981094,
0.16072577238082886,
0.06075381115078926,
0.06033587455749512,
-0.04486510530114174,
0.029123716056346893,
-0.17911460995674133,
-0.016240639612078667,
-0.05903146043419838,
-0.05356215685606003,
0.028998887166380882,
-0.013566491194069386,
0.08943288028240204,
0.0013925351668149233,
0.07309664040803909,
0.0023671595845371485,
0.023888081312179565,
0.021290279924869537,
0.07959132641553879,
-0.0008481644908897579,
-0.0666002631187439,
0.16981948912143707,
0.0807168185710907,
0.017435794696211815,
0.122561976313591,
0.09832026064395905,
0.00512895779684186,
0.01080978475511074,
0.02211298607289791,
0.05194352939724922,
-0.0535430870950222,
0.06238578259944916,
0.023510141298174858,
-0.07510768622159958,
-0.046717189252376556,
0.07765408605337143,
-0.08780810236930847,
0.007979072630405426,
-0.04257224500179291,
-0.013798537664115429,
0.12362804263830185,
-0.15795043110847473,
0.03911701217293739,
-0.005968513898551464,
-0.06692727655172348,
-0.18435849249362946,
-0.07617119699716568,
-0.12111476808786392,
-0.029782945290207863,
-0.054719746112823486,
-0.09553554654121399,
0.015691200271248817,
0.17860908806324005,
0.01359581295400858,
0.04388367012143135,
0.09211233258247375,
-0.18977220356464386,
-0.00023967577726580203,
-0.04678329452872276,
-0.014794735237956047,
-0.0021589158568531275,
-0.03567399084568024,
-0.012064303271472454,
0.047277528792619705,
0.020380660891532898,
0.08779727667570114,
0.04934617504477501,
0.03692178428173065,
0.09144320338964462,
-0.042839620262384415,
-0.08620373159646988,
-0.05502272769808769,
0.03284129127860069,
0.008840331807732582,
0.18573155999183655,
0.055528674274683,
0.0035739056766033173,
-0.02736811898648739,
0.21996930241584778,
-0.08926346898078918,
0.0024125210475176573,
-0.14317278563976288,
0.23133429884910583,
-0.021616250276565552,
0.04479822516441345,
0.015869993716478348,
-0.02578112855553627,
0.023853277787566185,
0.11749479919672012,
0.07824814319610596,
-0.00577422883361578,
-0.0058703855611383915,
0.04686228185892105,
-0.00631636381149292,
-0.043417736887931824,
0.04488946124911308,
0.04511486738920212,
0.17889395356178284,
-0.08418695628643036,
0.047476280480623245,
-0.004556892905384302,
0.0012783198617398739,
-0.08169636130332947,
0.020057151094079018,
-0.008516625501215458,
-0.03200986236333847,
0.011710046790540218,
0.1055961325764656,
-0.04495421051979065,
0.0756545215845108,
0.11345207691192627,
-0.11475209891796112,
-0.13146676123142242,
0.024710815399885178,
-0.016567779704928398,
-0.05503746494650841,
0.07984744012355804,
-0.06744549423456192,
-0.018944840878248215,
0.05153203383088112,
-0.009616089053452015,
-0.15547969937324524,
-0.08841660618782043,
0.0016742540756240487,
0.17350424826145172,
0.2736837565898895,
0.02322494611144066,
0.12644898891448975,
0.1599591225385666,
0.0067936936393380165,
-0.16084901988506317,
0.08184276521205902,
0.03293989971280098,
-0.12898419797420502,
0.11368578672409058,
0.03471731022000313,
-0.050849199295043945,
0.14159102737903595,
0.06408534198999405,
-0.13158775866031647,
-0.01862412691116333,
0.03408604860305786,
0.07628683745861053,
-0.058054957538843155,
0.01499447412788868,
-0.08990427851676941,
0.11324725300073624,
0.13087786734104156,
-0.03819600120186806,
0.007326814811676741,
-0.038998302072286606,
0.07328267395496368,
-0.015021554194390774,
-0.02351488173007965,
-0.03432029113173485,
-0.10184846073389053,
0.05565459653735161,
-0.1578473001718521,
0.044668253511190414,
-0.2740030586719513,
-0.036745335906744,
0.015536020509898663,
-0.07251335680484772,
-0.03415445610880852,
0.09622133523225784,
0.013713952153921127,
-0.012956938706338406,
-0.04534595459699631,
-0.16689495742321014,
0.01422929484397173,
0.16359242796897888,
-0.1142660453915596,
-0.12278476357460022
] |
null | null | null |
# Hate Speech Detection Model
Created from a dataset provided by ROHAN KHILNANI
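
A minimal usage sketch follows. The card does not state a library or pipeline tag, so this assumes the checkpoint is a standard Transformers sequence-classification model; the model id is taken from this card, but the task name, example sentence, and label output are illustrative assumptions only.

```python
# Hedged sketch: assumes a transformers text-classification checkpoint.
from transformers import pipeline

# Load the model by its Hub id (assumption: it exposes a sequence-classification head).
classifier = pipeline("text-classification", model="adrianmoses/hate-speech-detection")

# Classify an arbitrary example sentence.
print(classifier("You are a wonderful person."))
# The label names and scores depend entirely on the model's own config.
```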
|
{}
| null |
adrianmoses/hate-speech-detection
|
[
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#has_space #region-us
|
# Hate Speech Detection Model
Created from a dataset provided by ROHAN KHILNANI
|
[
"# Hate Speech Detection Model\n\n\nCreated from dataset provided by ROHAN KHILNANI"
] |
[
"TAGS\n#has_space #region-us \n",
"# Hate Speech Detection Model\n\n\nCreated from dataset provided by ROHAN KHILNANI"
] |
[
10,
20
] |
[
"passage: TAGS\n#has_space #region-us \n# Hate Speech Detection Model\n\n\nCreated from dataset provided by ROHAN KHILNANI"
] |
[
-0.03169788420200348,
0.04784752056002617,
-0.0012945201015099883,
-0.02890436351299286,
-0.0015943690668791533,
-0.03327567130327225,
0.18231743574142456,
0.07666599005460739,
0.16377557814121246,
-0.01466536708176136,
0.14311467111110687,
-0.03664045035839081,
-0.04038479924201965,
0.07935768365859985,
-0.04593886062502861,
-0.1410285234451294,
0.08925079554319382,
-0.02545507438480854,
0.045001477003097534,
0.11735522747039795,
0.17440710961818695,
-0.06125496327877045,
-0.00006996445881668478,
0.05948903411626816,
-0.15249322354793549,
0.1318175494670868,
0.017681943252682686,
-0.12759536504745483,
0.14774467051029205,
-0.07385125756263733,
0.09648337960243225,
-0.00407345499843359,
0.03873293846845627,
0.009958154521882534,
0.03492245823144913,
0.004287244752049446,
0.006512994412332773,
-0.0032343666534870863,
0.025823300704360008,
-0.18990513682365417,
0.24391648173332214,
0.006956996861845255,
-0.0065437802113592625,
0.07689323276281357,
-0.1355431228876114,
-0.16983133554458618,
-0.0014756473246961832,
0.05484667420387268,
0.06942258030176163,
0.09198009222745895,
-0.09361165016889572,
0.14023888111114502,
-0.0948699340224266,
0.12058471143245697,
0.05764230340719223,
-0.051801398396492004,
-0.0042702145874500275,
0.012041546404361725,
0.007710922509431839,
0.0824296623468399,
-0.08196039497852325,
0.09753797948360443,
0.053659114986658096,
-0.06531747430562973,
-0.01460630726069212,
-0.09064801037311554,
-0.21311646699905396,
0.033439453691244125,
-0.07262860238552094,
0.052977584302425385,
0.1189897358417511,
0.07541545480489731,
0.09802912175655365,
-0.04697580635547638,
-0.043981507420539856,
0.01741589419543743,
0.06564468145370483,
-0.029262619093060493,
-0.07676021009683609,
-0.0033578916918486357,
0.04634462296962738,
-0.0048239282332360744,
-0.09410656243562698,
-0.002694089198485017,
-0.08288447558879852,
0.19717949628829956,
-0.011104719713330269,
0.07034595310688019,
-0.12995003163814545,
-0.10986124724149704,
-0.07437942177057266,
-0.07838654518127441,
0.05395157262682915,
-0.07531934976577759,
-0.08126399666070938,
0.01531849056482315,
-0.08619944751262665,
-0.031089968979358673,
0.0915021076798439,
-0.10091125220060349,
0.011321772821247578,
0.010828002355992794,
-0.0346541590988636,
0.034703925251960754,
0.21042850613594055,
0.01609175093472004,
-0.0694841668009758,
0.0376489982008934,
-0.02998407743871212,
0.056906651705503464,
0.050284843891859055,
0.026436682790517807,
0.01567939482629299,
-0.050255801528692245,
-0.05307057499885559,
0.0988587737083435,
-0.036167655140161514,
0.05758871138095856,
-0.024954505264759064,
0.03143706172704697,
-0.15160289406776428,
-0.0700213685631752,
0.004688232205808163,
0.07070896774530411,
-0.04977692663669586,
-0.07110732793807983,
-0.05342152714729309,
0.03326541557908058,
0.06634903699159622,
-0.11481060832738876,
-0.05876787751913071,
0.06850139796733856,
0.0179984662681818,
-0.09784755110740662,
-0.007802674081176519,
-0.11985833197832108,
0.062127869576215744,
-0.16169309616088867,
-0.03342810273170471,
-0.055327072739601135,
-0.12355721741914749,
0.001279790187254548,
0.057813551276922226,
-0.11201266944408417,
0.08768722414970398,
0.03269614279270172,
-0.10865779221057892,
-0.12047281116247177,
-0.06453970819711685,
0.031847480684518814,
-0.022991543635725975,
0.16151517629623413,
-0.11177702993154526,
0.030394716188311577,
-0.05835583806037903,
0.105764240026474,
0.1524566411972046,
0.07676073908805847,
-0.13123910129070282,
0.10236542671918869,
-0.06795493513345718,
0.005720101296901703,
-0.1654619425535202,
0.07135289162397385,
0.0416971854865551,
0.1612354964017868,
-0.20756153762340546,
-0.007255134638398886,
0.000036672416172223166,
-0.10429807007312775,
-0.10615207999944687,
0.08527471125125885,
-0.06303602457046509,
0.06751278042793274,
0.08368208259344101,
0.4833962023258209,
-0.24708245694637299,
-0.0732291117310524,
-0.06899381428956985,
0.17439550161361694,
-0.11808638274669647,
-0.02853371575474739,
0.015035156160593033,
-0.06990867108106613,
-0.037882089614868164,
-0.07097578793764114,
0.13872189819812775,
0.10648287087678909,
-0.013035234063863754,
-0.10872998833656311,
-0.006871584802865982,
-0.08821304887533188,
0.10042347013950348,
-0.06422753632068634,
0.013012227602303028,
-0.08384295552968979,
0.03154703974723816,
-0.05734497308731079,
0.19020973145961761,
0.01291465200483799,
0.02474628947675228,
-0.11237368732690811,
0.08289927244186401,
-0.022219553589820862,
-0.06653741002082825,
-0.12867440283298492,
-0.003166773123666644,
-0.1328566074371338,
-0.04781467840075493,
0.11906925588846207,
0.09971530735492706,
0.03031388483941555,
-0.13269397616386414,
0.0326571948826313,
0.019696498289704323,
0.0979660153388977,
0.12314645200967789,
-0.06796682626008987,
-0.020277921110391617,
0.047001972794532776,
-0.041398629546165466,
0.07114602625370026,
-0.1495068520307541,
-0.05721404030919075,
0.15894901752471924,
-0.00018614136206451803,
-0.057899389415979385,
0.08831667900085449,
0.13642029464244843,
0.02001393958926201,
0.027300134301185608,
-0.06329084187746048,
0.062060076743364334,
-0.030662667006254196,
-0.08179657906293869,
0.26853856444358826,
-0.11408758908510208,
0.029585711658000946,
0.15829706192016602,
-0.14203649759292603,
0.036714714020490646,
0.1315421611070633,
0.017477145418524742,
0.05286137014627457,
0.07736964523792267,
-0.021396802738308907,
-0.07898704707622528,
-0.07548733800649643,
0.029714209958910942,
-0.01852702721953392,
0.032126810401678085,
-0.06487888097763062,
-0.009318657219409943,
-0.12633341550827026,
0.11530888080596924,
-0.016419565305113792,
-0.24769413471221924,
0.15574778616428375,
0.2988920211791992,
-0.10772710293531418,
0.11164253950119019,
-0.007974105887115002,
0.012324805371463299,
-0.03544315695762634,
-0.10250262916088104,
-0.1341683715581894,
0.13696981966495514,
-0.1633024513721466,
-0.1371299773454666,
-0.0006295175408013165,
-0.03794340044260025,
0.08657081425189972,
-0.026330336928367615,
-0.08213373273611069,
-0.014844594523310661,
0.0360584557056427,
-0.2737179398536682,
0.17120465636253357,
-0.009799571707844734,
0.12405611574649811,
-0.05316257104277611,
-0.17735953629016876,
-0.011986981146037579,
-0.06605390459299088,
-0.07017397880554199,
-0.019136473536491394,
-0.11707894504070282,
-0.1526968777179718,
0.1456315517425537,
-0.031159691512584686,
-0.06932953000068665,
0.037937719374895096,
0.01648077555000782,
-0.22786372900009155,
0.045601505786180496,
-0.06529794633388519,
-0.017158284783363342,
-0.09429217129945755,
0.09723282605409622,
0.021812651306390762,
-0.033323243260383606,
-0.008255599066615105,
-0.153527170419693,
-0.04564116150140762,
-0.16562895476818085,
0.055438775569200516,
0.03402075543999672,
-0.07798311859369278,
0.10085082799196243,
0.19711318612098694,
0.06158142909407616,
-0.010655113495886326,
-0.07534745335578918,
0.1710200309753418,
-0.10344071686267853,
-0.13591839373111725,
0.003303676610812545,
-0.08906062692403793,
-0.027221787720918655,
0.12288083881139755,
-0.028260348364710808,
-0.1076044961810112,
0.012468069791793823,
0.016377421095967293,
-0.012085982598364353,
-0.034135498106479645,
-0.12078654021024704,
-0.039794228971004486,
-0.04591185599565506,
0.04141813889145851,
0.07707799226045609,
-0.04696501046419144,
0.022477278485894203,
0.13438895344734192,
-0.1890714168548584,
0.015991313382983208,
-0.045347895473241806,
0.11314845830202103,
-0.08736550807952881,
0.049834638833999634,
-0.08858893811702728,
-0.05274052545428276,
0.05944650247693062,
-0.02316422387957573,
0.05010801926255226,
0.0204275231808424,
0.08420054614543915,
0.10391338169574738,
0.13087378442287445,
0.14143195748329163,
0.019106391817331314,
-0.0044931890442967415,
-0.011494601145386696,
0.015015828423202038,
0.00410408154129982,
-0.06580585241317749,
0.07546885311603546,
0.15914879739284515,
-0.058349233120679855,
0.0936155617237091,
-0.2134382277727127,
0.16719411313533783,
0.05276875197887421,
0.15252165496349335,
-0.14212307333946228,
0.07002250850200653,
0.0638560801744461,
0.016813455149531364,
0.0897754430770874,
0.14230220019817352,
0.09485077112913132,
0.012815638445317745,
0.05065339431166649,
0.08058017492294312,
0.021504312753677368,
-0.01756417192518711,
0.019130781292915344,
-0.22585177421569824,
-0.1999708116054535,
-0.044481415301561356,
0.03317132592201233,
-0.1639155000448227,
0.2779862582683563,
-0.015248081646859646,
-0.05862874910235405,
-0.09801262617111206,
-0.14437642693519592,
0.08006341010332108,
0.008444076403975487,
0.1536611020565033,
0.04261014237999916,
-0.17066457867622375,
-0.18700073659420013,
-0.03506728261709213,
0.06867051869630814,
0.0972963348031044,
0.04067728668451309,
-0.08782035112380981,
0.03701113909482956,
0.03088349848985672,
0.06331847608089447,
-0.06739190965890884,
-0.15823717415332794,
-0.06927569955587387,
-0.05544167384505272,
0.21972861886024475,
-0.0875236913561821,
0.03290289640426636,
-0.07814984768629074,
-0.13202935457229614,
-0.0281345397233963,
0.10478687286376953,
-0.05264393985271454,
-0.13848508894443512,
0.09880799800157547,
0.1066974401473999,
-0.03908558189868927,
-0.014981752261519432,
0.01500468049198389,
-0.0530424602329731,
-0.0663108304142952,
-0.06551136076450348,
0.13193745911121368,
-0.036693498492240906,
0.09772849828004837,
-0.016313455998897552,
0.1682208776473999,
0.10354345291852951,
0.038685206323862076,
0.07115631550550461,
0.08381471782922745,
-0.04629810154438019,
-0.10872773081064224,
0.04525086283683777,
-0.05072501674294472,
-0.15636321902275085,
0.10290543735027313,
0.10363787412643433,
-0.16910365223884583,
-0.03640170022845268,
0.050222013145685196,
0.22189392149448395,
0.16704082489013672,
-0.00790855847299099,
0.10969886928796768,
0.17561087012290955,
-0.03058573789894581,
-0.2496698647737503,
-0.08370256423950195,
-0.09339418262243271,
0.05954158306121826,
0.16176599264144897,
-0.014389504678547382,
-0.03884328156709671,
0.003921416122466326,
-0.054504573345184326,
0.0586048886179924,
-0.25502505898475647,
-0.06542312353849411,
0.31850913166999817,
-0.030253784731030464,
0.47750499844551086,
-0.02746039628982544,
-0.028557194396853447,
-0.05645252391695976,
0.13260509073734283,
0.1367723047733307,
-0.09346120059490204,
0.019039705395698547,
0.03857430815696716,
0.042526934295892715,
0.0757332444190979,
0.04843529313802719,
0.19301702082157135,
0.09901522845029831,
0.10619012266397476,
-0.14130127429962158,
-0.2860015332698822,
0.10559350997209549,
-0.019056882709264755,
0.00342083559371531,
0.0038301029708236456,
0.03645328804850578,
-0.19682219624519348,
-0.038308873772621155,
-0.13232412934303284,
0.10243222117424011,
0.07894115149974823,
0.005703840404748917,
-0.04128535836935043,
-0.03319662809371948,
-0.007687754463404417,
-0.000499081623274833,
0.15871703624725342,
-0.14349709451198578,
0.07847218215465546,
0.014676149934530258,
0.0182183850556612,
0.107014000415802,
0.07132449001073837,
0.0877658799290657,
-0.03335781767964363,
0.060860998928546906,
-0.1311662644147873,
-0.029052704572677612,
0.005305225495249033,
0.04051195830106735,
0.024086352437734604,
0.029444118961691856,
-0.01733465865254402,
0.1880212426185608,
0.12692862749099731,
-0.035072315484285355,
-0.10244075953960419,
-0.003272037021815777,
-0.2219376415014267,
0.04426475241780281,
-0.010902134701609612,
0.19812928140163422,
0.06570453196763992,
-0.019210590049624443,
-0.0352565161883831,
0.02789868600666523,
-0.1350400596857071,
0.009622961282730103,
0.1681392788887024,
-0.004875051788985729,
-0.07334870100021362,
0.11428402364253998,
0.02820172905921936,
0.0068629346787929535,
0.061003249138593674,
0.07834684103727341,
-0.12801913917064667,
-0.11872893571853638,
-0.32007288932800293,
0.016914423555135727,
0.006671752780675888,
-0.04717690870165825,
0.08833067119121552,
-0.056332267820835114,
-0.09390047192573547,
0.11228971928358078,
0.06289592385292053,
0.1461767554283142,
-0.037745151668787,
-0.0524839349091053,
0.1021457388997078,
-0.013572068884968758,
0.002269374206662178,
-0.07953333854675293,
-0.02069469355046749,
-0.05043184757232666,
-0.00036548281786963344,
0.2086542844772339,
-0.07767059653997421,
-0.11605862528085709,
-0.1067655086517334,
0.056687306612730026,
-0.1830943375825882,
-0.07043980807065964,
-0.10460112988948822,
0.03308573365211487,
0.015188299119472504,
-0.06287940591573715,
-0.04762204736471176,
0.03548693284392357,
-0.14650645852088928,
0.07970471680164337,
0.02220860868692398,
0.08006619662046432,
-0.1658163070678711,
-0.03571216017007828,
0.0716002881526947,
0.06847694516181946,
0.17871399223804474,
0.2160470336675644,
-0.13936665654182434,
0.09748771041631699,
-0.1349744349718094,
-0.1581682711839676,
0.11410900950431824,
-0.04298808053135872,
0.030365455895662308,
0.0442931242287159,
-0.026101157069206238,
0.07533256709575653,
0.11257745325565338,
0.07771763205528259,
0.055139098316431046,
0.012029625475406647,
0.04429761692881584,
0.01747344434261322,
-0.08527257293462753,
0.0805220752954483,
-0.10105141997337341,
0.11642055213451385,
0.10850276052951813,
0.06866292655467987,
0.019966712221503258,
-0.043177809566259384,
0.00587314460426569,
0.047461070120334625,
-0.050804175436496735,
-0.060454703867435455,
-0.08828228712081909,
-0.09726434201002121,
0.0707816556096077,
-0.08009937405586243,
0.2650100886821747,
0.007359864190220833,
-0.1874006986618042,
0.08399458974599838,
0.09463413804769516,
-0.13027527928352356,
0.021036382764577866,
0.2235059142112732,
0.009495233185589314,
-0.03917985409498215,
0.0387420579791069,
-0.04941481351852417,
0.019246742129325867,
0.03233102709054947,
-0.10992010682821274,
0.08729621022939682,
0.1994115263223648,
0.09523262828588486,
0.038043733686208725,
-0.050698742270469666,
0.1061219647526741,
-0.04534846916794777,
-0.16460838913917542,
0.00007531265873694792,
-0.08944255113601685,
-0.018738718703389168,
0.13394387066364288,
0.07719486206769943,
-0.05507544428110123,
-0.08552878350019455,
-0.059922702610492706,
-0.1343960464000702,
-0.24389541149139404,
-0.044006552547216415,
-0.060907769948244095,
0.04594732075929642,
0.011922257021069527,
0.009320159442722797,
0.18537919223308563,
0.12021328508853912,
0.00025486081722192466,
0.13314971327781677,
-0.028305985033512115,
-0.10113860666751862,
0.09133937954902649,
-0.06977631151676178,
0.04181131720542908,
-0.05705470219254494,
-0.0403536856174469,
0.030661869794130325,
-0.02477944642305374,
-0.0117909861728549,
-0.040531646460294724,
-0.0432734340429306,
-0.00251952582038939,
-0.23038995265960693,
-0.04237136244773865,
-0.05697556212544441,
0.13559654355049133,
-0.024020453914999962,
0.14731840789318085,
0.15524756908416748,
-0.0017906896537169814,
-0.052805814892053604,
0.1657821387052536,
0.0311441533267498,
-0.011693012900650501,
-0.03173112869262695,
0.06954939663410187,
-0.057845644652843475,
0.03305001184344292,
-0.1399158537387848,
-0.06939806789159775,
-0.05331173911690712,
0.04977453872561455,
0.3197496831417084,
-0.2553122639656067,
0.07635600119829178,
-0.20606277883052826,
0.06197882443666458,
-0.04644216597080231,
-0.01664012111723423,
0.029281100258231163,
0.06126336380839348,
-0.0615830197930336,
0.056211505085229874,
0.059279609471559525,
-0.04045657441020012,
0.02335590310394764,
0.046786580234766006,
-0.02238738164305687,
-0.04033316671848297,
-0.0638778954744339,
0.19652020931243896,
-0.05650302767753601,
-0.02214210480451584,
-0.08642091602087021,
-0.10828506201505661,
-0.13929937779903412,
0.12267573922872543,
-0.08065365254878998,
0.15274690091609955,
0.1057567447423935,
-0.08498145639896393,
-0.09289606660604477,
-0.0541590191423893,
0.003674233565106988,
-0.17164909839630127,
-0.1703069508075714,
0.13951709866523743,
0.014679240994155407,
0.030613355338573456,
0.011226351372897625,
0.10901632905006409,
0.020295489579439163,
-0.004074510652571917,
-0.05366699397563934,
0.1624336689710617,
0.06953386217355728,
0.049438804388046265,
-0.07195544242858887,
0.045372720807790756,
0.014836257323622704,
-0.042158182710409164,
0.0760856345295906,
0.015028897672891617,
0.06983303278684616,
-0.04313573241233826,
0.0034276721999049187,
-0.09727616608142853,
-0.04211016744375229,
-0.06423137336969376,
0.09519380331039429,
-0.03077734261751175,
-0.01330320630222559,
0.016389917582273483,
0.0685027539730072,
-0.0742306038737297,
-0.10398583859205246,
-0.2000051885843277,
-0.06890565156936646,
-0.08369346708059311,
-0.03806545212864876,
-0.012762627564370632,
0.006727833766490221,
0.08431155979633331,
0.055892378091812134,
-0.1149645447731018,
0.053766001015901566,
0.1331244707107544,
0.02331600897014141,
-0.022346118465065956,
-0.005894879344850779,
0.006579760927706957,
-0.192407488822937,
0.11009947210550308,
0.00812192726880312,
-0.07619215548038483,
-0.11152153462171555
] |
null | null |
transformers
|
# Rick DialoGPT medium model
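
A short usage sketch, following the standard DialoGPT chat loop from the Transformers documentation; the prompt text and generation settings below are example choices, not part of this card.

```python
# Hedged sketch: single-turn chat with the standard DialoGPT recipe.
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("adviksinghania/DialoGPT-medium-rick")
model = AutoModelForCausalLM.from_pretrained("adviksinghania/DialoGPT-medium-rick")

# Encode the user message and append the end-of-sequence token.
input_ids = tokenizer.encode("Hi Rick, how are you?" + tokenizer.eos_token, return_tensors="pt")

# Generate a reply; pad with the EOS token since GPT-2 has no pad token.
output_ids = model.generate(input_ids, max_length=1000, pad_token_id=tokenizer.eos_token_id)

# Decode only the newly generated tokens (the reply).
print(tokenizer.decode(output_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True))
```

For multi-turn chat, the usual pattern is to concatenate the previous `output_ids` with the next encoded user message before calling `generate` again.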
|
{"tags": ["conversational"]}
|
text-generation
|
adviksinghania/DialoGPT-medium-rick
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Rick DialoGPT medium model
|
[
"# Rick DialoGPT medium model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Rick DialoGPT medium model"
] |
[
51,
8
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Rick DialoGPT medium model"
] |
[
-0.030108509585261345,
0.06065034866333008,
-0.005259789060801268,
0.008527795784175396,
0.1336814910173416,
-0.0021733685862272978,
0.163829043507576,
0.12824058532714844,
-0.042411260306835175,
-0.056957826018333435,
0.12328558415174484,
0.18358606100082397,
-0.0030247000977396965,
0.07091708481311798,
-0.07113973051309586,
-0.33736589550971985,
0.02943635731935501,
0.06676463037729263,
-0.033269692212343216,
0.12475505471229553,
0.1128426343202591,
-0.04980234429240227,
0.06940905749797821,
0.0008216280257329345,
-0.15312887728214264,
0.009332144632935524,
0.013355279341340065,
-0.11108467727899551,
0.10073218494653702,
0.0783681720495224,
0.0201712679117918,
0.049436602741479874,
-0.04736914858222008,
-0.12109021842479706,
0.04711078852415085,
-0.0036375962663441896,
-0.04007761552929878,
0.05084898695349693,
0.015311237424612045,
-0.07366892695426941,
0.12345433980226517,
0.10502316057682037,
-0.013338536955416203,
0.03469241037964821,
-0.14274005591869354,
-0.007781805004924536,
-0.00888855941593647,
0.07385743409395218,
0.05625146999955177,
0.10551505535840988,
-0.03523865342140198,
0.11648072302341461,
-0.053043145686388016,
0.12087761610746384,
0.09570316970348358,
-0.29387983679771423,
-0.02973291464149952,
0.15075331926345825,
0.0522071048617363,
0.09576338529586792,
-0.04249218851327896,
0.098233163356781,
0.008808857761323452,
0.004133500624448061,
-0.049379587173461914,
-0.07749665528535843,
-0.06938314437866211,
0.019887492060661316,
-0.08179502934217453,
-0.004374299664050341,
0.2532006800174713,
-0.04299174249172211,
0.07641617953777313,
-0.08926311135292053,
-0.08482807874679565,
-0.010943999513983727,
-0.036567527800798416,
-0.025050075724720955,
-0.0956391990184784,
0.0781356692314148,
-0.05313217267394066,
-0.10084706544876099,
-0.11804084479808807,
-0.028111806139349937,
-0.17747145891189575,
0.18209339678287506,
0.020536411553621292,
0.04513673111796379,
-0.2202424556016922,
0.10159838199615479,
0.0054252054542303085,
-0.10397166013717651,
0.027748633176088333,
-0.09181661158800125,
0.020814672112464905,
0.01880842261016369,
-0.021570606157183647,
-0.034297775477170944,
0.08196935802698135,
0.08953101187944412,
-0.004459093324840069,
0.017778316512703896,
-0.01799529232084751,
0.047888293862342834,
0.03685770183801651,
0.05781487748026848,
-0.03214378282427788,
-0.03875236585736275,
0.025040850043296814,
-0.08706746995449066,
-0.003747349139302969,
-0.05952418968081474,
-0.1978987455368042,
-0.004382370971143246,
0.0319560170173645,
0.052772246301174164,
0.0365387462079525,
0.12516143918037415,
0.011257830075919628,
-0.057363852858543396,
0.049525123089551926,
-0.017455248162150383,
-0.016467265784740448,
0.0065848506055772305,
0.011569916270673275,
0.18436110019683838,
0.000737521389964968,
0.04325779527425766,
-0.10472387075424194,
0.0010771563975140452,
-0.033391863107681274,
-0.0126687902957201,
-0.03952433168888092,
-0.059806209057569504,
-0.0018738035578280687,
-0.03278432413935661,
0.009501866064965725,
-0.14174824953079224,
-0.16749998927116394,
-0.010650787502527237,
0.0004531523445621133,
-0.04658874496817589,
-0.11581672728061676,
-0.09344211220741272,
-0.03344530239701271,
0.059416476637125015,
-0.06704486161470413,
0.01703765243291855,
-0.04566902294754982,
0.09202323108911514,
-0.022711805999279022,
0.08334498852491379,
-0.10863763093948364,
0.07816765457391739,
-0.04576357826590538,
-0.04422818869352341,
-0.08472024649381638,
0.1375393271446228,
0.010705159977078438,
0.05358738452196121,
-0.024912694469094276,
-0.02684292383491993,
-0.0971853956580162,
0.07806906849145889,
-0.03625877946615219,
0.22947019338607788,
-0.11623045057058334,
-0.10234414786100388,
0.26474088430404663,
-0.05473845452070236,
-0.1274268478155136,
0.11561214923858643,
-0.00883884821087122,
0.09435506910085678,
0.13281697034835815,
0.14898927509784698,
0.07640501111745834,
0.005352917592972517,
0.06877440214157104,
0.11941298097372055,
-0.06782404333353043,
-0.021276425570249557,
0.02578694373369217,
-0.000686128216329962,
-0.07181011140346527,
0.02707803249359131,
0.09094857424497604,
0.06053534150123596,
-0.06567483395338058,
-0.00903027132153511,
0.00692564994096756,
0.007114144507795572,
0.06570122390985489,
-0.029374487698078156,
0.12860074639320374,
-0.03467599302530289,
-0.07143542170524597,
-0.002351414179429412,
0.021975882351398468,
-0.05161396414041519,
0.03776786848902702,
-0.08530494570732117,
0.023708321154117584,
-0.03853907808661461,
0.07048255950212479,
-0.16497279703617096,
-0.07075457274913788,
-0.0621982142329216,
0.2200637012720108,
0.0734793022274971,
0.13894625008106232,
0.05837884917855263,
-0.07414118945598602,
-0.0143052963539958,
0.02210118994116783,
0.19712390005588531,
-0.020267458632588387,
-0.0812511220574379,
-0.09816620498895645,
0.11278251558542252,
-0.07654727250337601,
0.03919602930545807,
-0.033846452832221985,
0.01753276400268078,
0.041781142354011536,
0.10336349159479141,
-0.030186744406819344,
0.045465707778930664,
0.01714135706424713,
-0.018719470128417015,
-0.05508332699537277,
-0.00010770251537906006,
0.09347251057624817,
0.004490066785365343,
-0.12107644230127335,
0.24701400101184845,
-0.19280430674552917,
0.14738821983337402,
0.1776942014694214,
-0.1980600357055664,
0.01990329660475254,
-0.12709404528141022,
-0.018153294920921326,
0.01658797450363636,
0.040264129638671875,
-0.03238973021507263,
0.2431326061487198,
-0.010603475384414196,
0.16558338701725006,
-0.03568289428949356,
-0.041638534516096115,
-0.041512683033943176,
-0.04213190823793411,
0.0071243904531002045,
0.11732260137796402,
0.06684807687997818,
-0.1671323925256729,
0.16962188482284546,
0.06610694527626038,
0.04087474197149277,
0.19028164446353912,
0.020198095589876175,
0.02458830364048481,
0.06934603303670883,
-0.013402100652456284,
-0.036450665444135666,
-0.07713963836431503,
-0.2028704434633255,
-0.021217595785856247,
0.07548439502716064,
0.0513993464410305,
0.10314768552780151,
-0.09076934307813644,
-0.023769451305270195,
-0.007982311770319939,
-0.026648206636309624,
0.04305165261030197,
0.14582940936088562,
0.016525469720363617,
0.12467828392982483,
-0.021489586681127548,
-0.03901667520403862,
0.07015158981084824,
0.007861671969294548,
-0.08122413605451584,
0.18606330454349518,
-0.12302789092063904,
-0.3513762950897217,
-0.10688088089227676,
-0.1735687404870987,
-0.0639437735080719,
0.03569804131984711,
0.11764661967754364,
-0.14850255846977234,
-0.01719825342297554,
-0.0010714847594499588,
0.052601248025894165,
-0.11082060635089874,
0.019718047231435776,
-0.020552000030875206,
-0.022561345249414444,
-0.12016726285219193,
-0.10547373443841934,
-0.04999278485774994,
-0.0405457466840744,
-0.07168653607368469,
0.13222607970237732,
-0.14599040150642395,
0.020158661529421806,
0.22905713319778442,
0.06337728351354599,
0.07620251923799515,
-0.0366392619907856,
0.17647992074489594,
-0.1026945412158966,
0.013569137081503868,
0.23834800720214844,
-0.01489273738116026,
0.06662211567163467,
0.11898602545261383,
-0.0012757411459460855,
-0.05396849289536476,
0.03252309560775757,
-0.014763614162802696,
-0.07679944485425949,
-0.2055961638689041,
-0.12382959574460983,
-0.11705604195594788,
0.02967226132750511,
0.025555679574608803,
0.046266090124845505,
0.12127191573381424,
0.0596437007188797,
-0.04947074502706528,
0.011311385780572891,
0.0727485939860344,
0.0790214091539383,
0.2635349631309509,
-0.06573835760354996,
0.14563626050949097,
-0.031658828258514404,
-0.16581344604492188,
0.07831116765737534,
0.07759605348110199,
0.09366112947463989,
0.05508142337203026,
0.09127950668334961,
0.0233793742954731,
-0.03742269054055214,
0.1387578845024109,
0.06597273051738739,
0.0022417763248085976,
-0.0349234975874424,
-0.03969224914908409,
-0.041305530816316605,
-0.00035696179838851094,
0.01973014324903488,
0.040796417742967606,
-0.15876591205596924,
-0.016874922439455986,
0.017665736377239227,
0.06876784563064575,
0.022855868563055992,
0.0648360326886177,
-0.1843261569738388,
-0.01538798026740551,
0.06878877431154251,
-0.01499428041279316,
-0.11539340764284134,
0.07037510722875595,
0.030741024762392044,
-0.1003103256225586,
0.05691693350672722,
-0.027289431542158127,
0.1298191249370575,
-0.07644255459308624,
0.07058286666870117,
-0.10904901474714279,
-0.03639378026127815,
-0.012567714788019657,
0.10843267291784286,
-0.3100464344024658,
0.19361905753612518,
-0.00521688349545002,
-0.0436495766043663,
-0.09878964722156525,
-0.021899942308664322,
0.02840983122587204,
0.11710404604673386,
0.08079318702220917,
-0.016204342246055603,
-0.005827185697853565,
0.06693346798419952,
-0.07573762536048889,
0.04471955448389053,
0.09839942306280136,
-0.0366366021335125,
-0.014910745434463024,
-0.038290493190288544,
-0.004157486837357283,
0.0035021428484469652,
-0.10253123939037323,
0.010210976004600525,
-0.20345450937747955,
0.08307453989982605,
0.10270636528730392,
0.06676667183637619,
0.02742910385131836,
-0.0362318716943264,
-0.0667925775051117,
0.25237059593200684,
-0.002815698739141226,
-0.10668328404426575,
-0.1127677857875824,
0.008353691548109055,
0.05212542042136192,
-0.0717119425535202,
-0.01685418374836445,
-0.06953192502260208,
0.05163649842143059,
-0.06295778602361679,
-0.18401509523391724,
0.11052460223436356,
-0.1017158180475235,
-0.03602100536227226,
-0.03074205107986927,
0.20655128359794617,
-0.0229275431483984,
0.017450004816055298,
0.03533914312720299,
-0.017203107476234436,
-0.12376031279563904,
-0.10940014570951462,
-0.00910424068570137,
0.03399983420968056,
0.016542529687285423,
0.019793517887592316,
-0.019327858462929726,
-0.020633673295378685,
-0.04630846902728081,
-0.02369004674255848,
0.30900076031684875,
0.10507164150476456,
-0.04272105172276497,
0.15459398925304413,
0.10709783434867859,
-0.0600711815059185,
-0.28408196568489075,
-0.10407370328903198,
-0.06304783374071121,
-0.04829716309905052,
-0.10490930825471878,
-0.16487200558185577,
0.08876894414424896,
-0.06274804472923279,
-0.01031825877726078,
0.08690955489873886,
-0.25888800621032715,
-0.10046343505382538,
0.1738613098859787,
-0.03974304348230362,
0.42761602997779846,
-0.11511750519275665,
-0.07688063383102417,
-0.059557363390922546,
-0.14054203033447266,
0.17619290947914124,
-0.02313515916466713,
0.11123814433813095,
-0.013581633567810059,
0.19726161658763885,
0.04864867776632309,
-0.005463660694658756,
0.06398458033800125,
0.006872424855828285,
-0.05601346865296364,
-0.08738472312688828,
-0.07413862645626068,
-0.034433357417583466,
0.007769261486828327,
0.0252731591463089,
-0.10570403188467026,
0.041862260550260544,
-0.13225990533828735,
-0.05645161494612694,
-0.08047203719615936,
0.024553639814257622,
0.03202836960554123,
-0.05877682566642761,
0.0002908836759161204,
-0.05456312745809555,
-0.002050328766927123,
0.006600777618587017,
0.17500053346157074,
-0.12114488333463669,
0.14465242624282837,
0.039828427135944366,
0.16337530314922333,
-0.14578844606876373,
-0.03921099752187729,
-0.06765305250883102,
-0.04863235726952553,
0.07555956393480301,
-0.11757135391235352,
0.031244266778230667,
0.1095954030752182,
-0.03544945642352104,
0.08565875142812729,
0.1115889921784401,
-0.001894454937428236,
0.014071037992835045,
0.09120365977287292,
-0.22289280593395233,
-0.05497269332408905,
-0.08703796565532684,
0.09946760535240173,
0.048588965088129044,
0.10373834520578384,
0.21682271361351013,
0.010611442849040031,
-0.02622486650943756,
0.01778106391429901,
0.027375850826501846,
-0.0218130461871624,
0.07251810282468796,
0.00473052216693759,
0.035231225192546844,
-0.14525587856769562,
0.034637000411748886,
-0.014676899649202824,
-0.09331050515174866,
0.011406434699892998,
0.152308851480484,
-0.10709209740161896,
-0.12171599268913269,
-0.022759489715099335,
0.11030919849872589,
-0.1377595216035843,
-0.010425125248730183,
-0.04003053903579712,
-0.13672387599945068,
0.0700923427939415,
0.1055789515376091,
0.04309255629777908,
0.030645595863461494,
-0.09915973991155624,
-0.026001200079917908,
-0.036581143736839294,
0.009770985692739487,
0.030879279598593712,
-0.014365757815539837,
-0.05584661662578583,
0.059581656008958817,
-0.03637615218758583,
0.11410988867282867,
-0.08809465169906616,
-0.08823243528604507,
-0.15972834825515747,
0.03802056983113289,
-0.08022638410329819,
-0.08315174281597137,
-0.08056850731372833,
-0.03185850754380226,
0.0050226422026753426,
-0.03440695255994797,
-0.030070628970861435,
-0.02861727587878704,
-0.1024135947227478,
0.033035796135663986,
-0.045588500797748566,
-0.004198241978883743,
-0.06430172175168991,
0.02816985547542572,
0.0367879793047905,
-0.02466888166964054,
0.15243305265903473,
0.12658801674842834,
-0.11640331894159317,
0.07903425395488739,
-0.1665632724761963,
-0.07690224796533585,
0.10212711989879608,
0.029202355071902275,
0.0459396056830883,
0.06938072293996811,
0.018626824021339417,
0.0687672421336174,
0.07072538137435913,
0.04576364532113075,
0.004370557144284248,
-0.06313806027173996,
0.07907629758119583,
-0.08377579599618912,
-0.09219220280647278,
-0.05389650911092758,
0.005243865307420492,
0.033946868032217026,
0.08192242681980133,
0.09765111654996872,
-0.07111319154500961,
0.08602467179298401,
-0.05428827553987503,
0.04540250077843666,
0.02141127549111843,
-0.17503416538238525,
0.023340286687016487,
-0.08568137884140015,
0.04923897609114647,
0.0075888680294156075,
0.1879444122314453,
0.02753574587404728,
-0.04721079766750336,
0.02602860890328884,
0.06938059628009796,
0.02840481512248516,
-0.023796463385224342,
0.17750796675682068,
0.10205624997615814,
-0.031646307557821274,
-0.0816657766699791,
0.09124363213777542,
0.04638834670186043,
0.04327203333377838,
0.14291872084140778,
-0.04679443687200546,
-0.04527478665113449,
0.07925528287887573,
0.0020430702716112137,
0.021543651819229126,
-0.11144746840000153,
-0.15182720124721527,
-0.03696247190237045,
0.04232430085539818,
-0.0484943687915802,
0.1132049560546875,
0.15777255594730377,
-0.007877958007156849,
0.010392533615231514,
-0.014889244921505451,
-0.05139007419347763,
-0.19251251220703125,
-0.19251778721809387,
-0.08586124330759048,
-0.14589010179042816,
0.0008929565665312111,
-0.13366511464118958,
0.051847297698259354,
0.012139556929469109,
0.10546685755252838,
-0.04102649539709091,
0.04386810213327408,
0.04255952686071396,
-0.09925692528486252,
0.06879237294197083,
-0.0401858314871788,
0.0984947606921196,
-0.021494615823030472,
0.02535908855497837,
-0.05310136824846268,
0.029580621048808098,
0.01332192774862051,
0.040778063237667084,
-0.03706562891602516,
0.00833358895033598,
-0.11671995371580124,
-0.0818723514676094,
-0.06895536929368973,
0.06092004477977753,
0.031008170917630196,
0.18165671825408936,
0.02054525725543499,
-0.04284707456827164,
0.023010844364762306,
0.22627297043800354,
-0.07598285377025604,
-0.12120236456394196,
-0.060619983822107315,
0.18374325335025787,
-0.0184725783765316,
0.08237538486719131,
-0.047568000853061676,
0.005017929710447788,
-0.09000765532255173,
0.329784631729126,
0.30650514364242554,
-0.11026211082935333,
0.01250328030437231,
-0.008083890192210674,
0.03926708549261093,
0.12697643041610718,
0.0936170145869255,
0.11504438519477844,
0.31303107738494873,
-0.05801720172166824,
-0.03535287454724312,
-0.01323561742901802,
-0.017891496419906616,
-0.053613174706697464,
0.04281734302639961,
0.044483426958322525,
-0.06914927065372467,
-0.010515972971916199,
0.11914818733930588,
-0.2356942743062973,
0.07649324834346771,
-0.14444111287593842,
-0.16713377833366394,
-0.06138356029987335,
-0.0024000604171305895,
0.09698992222547531,
0.010968806222081184,
0.07937484234571457,
-0.013553940691053867,
-0.06262131035327911,
0.02487132139503956,
0.02089245803654194,
-0.19772084057331085,
0.013985641300678253,
0.08094234764575958,
-0.03812834620475769,
-0.06651341170072556,
-0.021226560696959496,
0.07495469599962234,
0.09216435998678207,
0.02902383543550968,
-0.019669706001877785,
0.04876154288649559,
-0.024474147707223892,
-0.09715831279754639,
0.044206999242305756,
0.02084456756711006,
0.00659699272364378,
-0.08183370530605316,
0.07584983855485916,
-0.1433980017900467,
0.03040739893913269,
0.005186491180211306,
-0.042245909571647644,
-0.025958534330129623,
0.028409715741872787,
-0.06554403901100159,
0.08284134417772293,
0.08188496530056,
-0.01943303272128105,
-0.014262179844081402,
-0.015752209350466728,
-0.00769417267292738,
-0.024568572640419006,
-0.05636901035904884,
-0.0860723927617073,
-0.1641896367073059,
-0.12626388669013977,
0.08419348299503326,
0.007222841493785381,
-0.2119470089673996,
0.02287725731730461,
-0.13305528461933136,
0.04862068220973015,
-0.12084878981113434,
0.09286662191152573,
0.09174729883670807,
0.01656573824584484,
-0.003460422856733203,
0.0053430236876010895,
0.03035377711057663,
0.07401631027460098,
-0.14823199808597565,
-0.06748411059379578
] |
null | null |
transformers
|
distilbert-base-uncased fine-tuned on the conll2003 dataset for NER.
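
A minimal inference sketch using the Transformers token-classification pipeline; the example sentence and the `aggregation_strategy` choice are assumptions for illustration, not taken from this card.

```python
# Hedged sketch: NER inference with the token-classification pipeline.
from transformers import pipeline

ner = pipeline(
    "token-classification",
    model="adzcodez/TokenClassificationTest",
    aggregation_strategy="simple",  # merge word pieces into whole entity spans
)

# Tag an arbitrary example sentence; labels follow the conll2003 scheme (PER, ORG, LOC, MISC).
print(ner("Hugging Face is based in New York City."))
```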
|
{}
|
token-classification
|
adzcodez/TokenClassificationTest
|
[
"transformers",
"pytorch",
"distilbert",
"token-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #distilbert #token-classification #autotrain_compatible #endpoints_compatible #region-us
|
distilbert-base-uncased fine-tuned on the conll2003 dataset for NER.
|
[] |
[
"TAGS\n#transformers #pytorch #distilbert #token-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
39
] |
[
"passage: TAGS\n#transformers #pytorch #distilbert #token-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.052244774997234344,
0.06545011699199677,
-0.008639469742774963,
0.03241344913840294,
0.175429105758667,
0.02250700816512108,
0.0550207644701004,
0.09252465516328812,
0.03871307149529457,
-0.022530805319547653,
0.10618634521961212,
0.273651659488678,
-0.03965577855706215,
0.12325426936149597,
-0.1075274646282196,
-0.2894398868083954,
0.07356814295053482,
0.08766255527734756,
-0.020704636350274086,
0.10766047239303589,
0.09265963733196259,
-0.09288309514522552,
0.06689994037151337,
-0.027159038931131363,
-0.1595311313867569,
0.03207682818174362,
0.044056858867406845,
-0.12647634744644165,
0.09314973652362823,
0.022749418392777443,
0.19855894148349762,
0.040836676955223083,
-0.041019901633262634,
-0.1166224479675293,
0.023606499657034874,
0.01547040231525898,
-0.05521877855062485,
0.05600808560848236,
0.07643786072731018,
-0.09580914676189423,
-0.0219888798892498,
0.05825555697083473,
0.0448046512901783,
0.0391489677131176,
-0.11968714743852615,
-0.1352660208940506,
-0.01816619001328945,
0.03580756485462189,
0.07805011421442032,
0.041045643389225006,
0.028921404853463173,
0.17849011719226837,
-0.15044258534908295,
0.11443862318992615,
0.09517070651054382,
-0.26679694652557373,
-0.0031444041524082422,
0.1292523294687271,
0.0046164062805473804,
-0.018204741179943085,
-0.03606192395091057,
0.009302235208451748,
0.028193140402436256,
0.017363600432872772,
0.02679636888206005,
-0.08230440318584442,
-0.10839861631393433,
0.016260676085948944,
-0.09004238247871399,
-0.01903209649026394,
0.19663673639297485,
-0.04806896671652794,
0.046537913382053375,
0.00038961198879405856,
-0.10363880544900894,
-0.05469834432005882,
-0.024919254705309868,
-0.0019270313205197453,
-0.03369230031967163,
0.05059009790420532,
0.03156014531850815,
0.05163497105240822,
-0.0963120386004448,
0.025368325412273407,
-0.22909191250801086,
0.2339733988046646,
0.024133233353495598,
0.05665011703968048,
-0.16111432015895844,
0.062053192406892776,
0.02385959029197693,
-0.09018426388502121,
0.03903092071413994,
-0.10355667769908905,
-0.0030960580334067345,
-0.05790361762046814,
-0.03588084131479263,
0.008356519974768162,
0.0702262669801712,
0.18158429861068726,
0.05200057104229927,
0.05614180862903595,
0.002612014999613166,
0.07546422630548477,
0.041677769273519516,
0.11805400997400284,
0.010296301916241646,
-0.02163318358361721,
0.04629233106970787,
-0.13648034632205963,
-0.016466468572616577,
-0.061642713844776154,
-0.13327725231647491,
-0.025872502475976944,
0.08399870991706848,
0.08356815576553345,
0.01050769817084074,
0.07225167751312256,
-0.08453252911567688,
-0.052529312670230865,
0.11033248156309128,
-0.08564355969429016,
0.0288478322327137,
0.005862457677721977,
0.017038550227880478,
0.1171407401561737,
-0.03451232984662056,
0.007622796576470137,
-0.042828403413295746,
0.1595960259437561,
-0.06474862992763519,
-0.001240234007127583,
-0.0363583080470562,
-0.08434842526912689,
0.027693843469023705,
-0.156326562166214,
0.04852237179875374,
-0.17286783456802368,
-0.09806546568870544,
0.016751017421483994,
0.03526950627565384,
0.008116010576486588,
-0.026394419372081757,
-0.004372334573417902,
-0.0012534069828689098,
0.0153600312769413,
-0.06155451014637947,
-0.08574757725000381,
-0.06172619387507439,
0.08195546269416809,
-0.02693997509777546,
0.060245636850595474,
-0.11069967597723007,
0.06814084202051163,
-0.11584824323654175,
0.019731715321540833,
-0.128435418009758,
0.008492004126310349,
-0.0782536193728447,
0.1824658364057541,
-0.020359128713607788,
-0.07519999146461487,
-0.04163529351353645,
0.019199026748538017,
-0.053465165197849274,
0.11114098131656647,
-0.0921519547700882,
-0.1160159632563591,
0.1528247743844986,
-0.1134885773062706,
-0.10407502949237823,
0.0746447890996933,
-0.0088157644495368,
0.0004682547878473997,
0.06841260194778442,
0.14110232889652252,
0.13900595903396606,
-0.003963976167142391,
0.06762129813432693,
0.11217378824949265,
-0.13545498251914978,
-0.12923963367938995,
-0.0014080682303756475,
0.006586302071809769,
-0.11830677837133408,
0.059759289026260376,
0.062961645424366,
0.07556118816137314,
-0.06437582522630692,
-0.04070505499839783,
-0.01498289406299591,
-0.008558175526559353,
0.12396085262298584,
0.07168649137020111,
0.1095287874341011,
-0.059755146503448486,
0.02271360345184803,
0.04508836567401886,
0.015582275576889515,
0.025532227009534836,
0.014047916978597641,
-0.0934840589761734,
0.12625056505203247,
-0.02338298223912716,
-0.0005821973900310695,
-0.21271337568759918,
-0.09660927206277847,
0.015881618484854698,
0.08325602114200592,
-0.03829143941402435,
0.12962576746940613,
0.06025642156600952,
-0.041243039071559906,
0.011415323242545128,
-0.040019191801548004,
0.18258816003799438,
0.04388026148080826,
-0.06138233840465546,
-0.06826400011777878,
0.025650721043348312,
-0.076991967856884,
-0.01811777986586094,
-0.035301778465509415,
0.009278667159378529,
0.09442979097366333,
0.1519499272108078,
0.019947828724980354,
0.06776885688304901,
-0.03504212200641632,
0.07556719332933426,
-0.08044109493494034,
0.002912420779466629,
0.10565771907567978,
-0.00989543180912733,
-0.053044769912958145,
0.11022631078958511,
-0.12713485956192017,
0.3272199332714081,
0.181272491812706,
-0.29385820031166077,
-0.003046005265787244,
-0.03641106188297272,
-0.018806355074048042,
0.00916427280753851,
0.05233078822493553,
0.034322407096624374,
0.03929765149950981,
0.0028419592417776585,
0.16641035676002502,
-0.011225296184420586,
-0.03822474181652069,
0.006519339978694916,
-0.06200917810201645,
-0.04478897899389267,
0.06541416794061661,
0.08616290241479874,
-0.19261644780635834,
0.18459641933441162,
0.2121824026107788,
-0.0010108614806085825,
0.10495617240667343,
-0.023582153022289276,
0.03090338595211506,
0.0518505722284317,
-0.02975335717201233,
-0.018792832270264626,
-0.03734070807695389,
-0.15125466883182526,
-0.03696629777550697,
0.08334839344024658,
0.028509115800261497,
0.04089614748954773,
-0.11636026203632355,
-0.01932358369231224,
0.014268607832491398,
0.04961339756846428,
-0.0010756836272776127,
0.08872425556182861,
0.060733161866664886,
0.07255058735609055,
-0.008237011730670929,
-0.1290588527917862,
0.11743193119764328,
0.007673490792512894,
-0.056963708251714706,
0.17129658162593842,
-0.1473640352487564,
-0.31383106112480164,
-0.1296360343694687,
-0.21345824003219604,
-0.023827891796827316,
0.0504806824028492,
0.0722610354423523,
-0.10145039856433868,
-0.06586749106645584,
0.06935781240463257,
-0.015180780552327633,
-0.0512370727956295,
0.05835802108049393,
-0.05568331480026245,
0.060071028769016266,
-0.03204667940735817,
-0.06479284167289734,
-0.06208999827504158,
-0.03698456659913063,
-0.012320964597165585,
0.1567457765340805,
-0.0963636189699173,
0.06026304513216019,
0.1822625696659088,
-0.0069684311747550964,
0.05740158632397652,
-0.020088426768779755,
0.17352673411369324,
-0.046299975365400314,
-0.009632858447730541,
0.1779099404811859,
-0.0692555159330368,
0.08829127252101898,
0.16755379736423492,
0.036638252437114716,
-0.06414678692817688,
0.010849271900951862,
-0.034463342279195786,
-0.100287526845932,
-0.16952891647815704,
-0.1537368893623352,
-0.1119035929441452,
0.04267009347677231,
0.07575717568397522,
0.0701836571097374,
0.13684214651584625,
0.09786874055862427,
0.04321812093257904,
0.017315063625574112,
-0.035995811223983765,
0.0700153037905693,
0.21807728707790375,
0.0003610766143538058,
0.14700554311275482,
-0.04274760186672211,
-0.13744382560253143,
0.07540588825941086,
0.04706449434161186,
0.14107172191143036,
0.0942540243268013,
0.006239299196749926,
0.015146850608289242,
0.14917683601379395,
0.1745269000530243,
0.14059285819530487,
0.017734985798597336,
-0.02609814517199993,
-0.0006434522219933569,
0.01725636050105095,
-0.06414647400379181,
0.008624634705483913,
0.11569195240736008,
-0.1191977933049202,
-0.050888217985630035,
-0.11390431225299835,
0.07104727625846863,
0.09107852727174759,
0.050144948065280914,
-0.22190675139427185,
0.006544626783579588,
0.07200814038515091,
-0.016979407519102097,
-0.07316234707832336,
0.06569323688745499,
-0.06495853513479233,
-0.1333383321762085,
0.06712734699249268,
-0.0534096360206604,
0.11056811362504959,
-0.0787852481007576,
0.06447955220937729,
0.0010914376471191645,
-0.07596311718225479,
0.04729162156581879,
0.0908246785402298,
-0.2514210045337677,
0.20299942791461945,
-0.0095571493729949,
-0.06967992335557938,
-0.07454511523246765,
0.0015767465811222792,
0.03467825800180435,
0.19786208868026733,
0.0700954720377922,
0.012505165301263332,
-0.1048109382390976,
-0.2234649807214737,
-0.0233503058552742,
-0.005915478803217411,
0.09451191127300262,
-0.03211575001478195,
-0.023844879120588303,
-0.04092973843216896,
-0.02361360192298889,
-0.01684786193072796,
-0.0734662339091301,
0.03073509968817234,
-0.13056819140911102,
0.06875457614660263,
0.030536269769072533,
0.034657035022974014,
0.007696993183344603,
-0.05570661276578903,
-0.13823699951171875,
0.21050412952899933,
-0.1080079972743988,
-0.08218592405319214,
-0.12433517724275589,
-0.09483148902654648,
0.06636238843202591,
-0.09044130146503448,
0.08842648565769196,
-0.08486948162317276,
0.013108116574585438,
-0.03154774010181427,
-0.20467084646224976,
0.13401123881340027,
-0.11082442104816437,
-0.02522261254489422,
-0.06891456991434097,
0.13073186576366425,
-0.08182680606842041,
0.011720782145857811,
0.018967799842357635,
0.023967241868376732,
-0.075563445687294,
-0.08747212588787079,
-0.010821747593581676,
0.001845525810495019,
0.037828557193279266,
0.04940038546919823,
-0.06572411209344864,
-0.023288290947675705,
-0.010581763461232185,
0.05312959477305412,
0.24666495621204376,
0.16612420976161957,
-0.0824754610657692,
0.12982642650604248,
0.12894710898399353,
-0.035676129162311554,
-0.3294021487236023,
-0.08444348722696304,
-0.10177461802959442,
-0.04714391380548477,
-0.034336600452661514,
-0.13632649183273315,
0.15724940598011017,
0.03008347377181053,
-0.033163201063871384,
0.08886248618364334,
-0.14009535312652588,
-0.07499906420707703,
0.22673429548740387,
0.0026510029565542936,
0.3669813871383667,
-0.07680461555719376,
-0.08635268360376358,
-0.025124667212367058,
-0.16979077458381653,
0.11361277103424072,
0.053857848048210144,
0.06472647190093994,
-0.02734540030360222,
0.03583456575870514,
0.039804935455322266,
-0.05740131437778473,
0.09428419917821884,
0.04268606752157211,
0.04595416039228439,
-0.10883653163909912,
-0.10729821771383286,
0.029032990336418152,
-0.02159634232521057,
-0.0007541602244600654,
0.058053672313690186,
0.028985081240534782,
-0.12246084958314896,
-0.023612935096025467,
-0.0678027868270874,
0.09051620960235596,
0.040796294808387756,
-0.06403154879808426,
-0.001199939171783626,
-0.010966448113322258,
-0.007900480180978775,
-0.002793498570099473,
0.2534249424934387,
0.013181601651012897,
0.12926901876926422,
0.10509515553712845,
0.09405941516160965,
-0.1775425225496292,
-0.03129109367728233,
-0.07905349880456924,
-0.05925830453634262,
0.08846785128116608,
-0.03179163485765457,
0.08367018401622772,
0.15124507248401642,
-0.03966588154435158,
0.04840177670121193,
0.11612171679735184,
0.058352138847112656,
-0.04034952074289322,
0.15098534524440765,
-0.2076486051082611,
0.04868818819522858,
-0.02492622099816799,
-0.04794926568865776,
0.07618166506290436,
0.10211249440908432,
0.10336335003376007,
0.048923734575510025,
-0.03393935412168503,
0.015420389361679554,
-0.013829030096530914,
-0.02858223393559456,
0.09122753888368607,
0.06427517533302307,
0.04394911602139473,
-0.15176290273666382,
0.040900737047195435,
0.03761638328433037,
-0.13958147168159485,
-0.0464254654943943,
0.0697217807173729,
-0.1658141314983368,
-0.11456048488616943,
-0.02794528193771839,
0.09706581383943558,
-0.1334974765777588,
-0.04181411489844322,
-0.05867905914783478,
-0.13301482796669006,
0.06188027933239937,
0.15028776228427887,
0.136247456073761,
0.12274095416069031,
-0.05336078256368637,
-0.034185003489255905,
-0.023030806332826614,
-0.015652230009436607,
0.023418238386511803,
0.07147684693336487,
-0.17805200815200806,
0.03546195104718208,
-0.01693383790552616,
0.15492822229862213,
-0.0953034907579422,
-0.07528027892112732,
-0.15088234841823578,
0.04079398512840271,
-0.07412651926279068,
-0.06110387668013573,
-0.1001008078455925,
-0.011719630099833012,
0.030047280713915825,
-0.06397981196641922,
-0.02104705199599266,
-0.029714250937104225,
-0.1151234358549118,
0.053195931017398834,
0.006454015150666237,
0.024100622162222862,
-0.04535020515322685,
-0.0504457987844944,
0.0979907363653183,
-0.036605723202228546,
0.09341247379779816,
0.10229532420635223,
-0.07205236703157425,
0.08274209499359131,
-0.09577591717243195,
-0.13278907537460327,
0.14064034819602966,
0.0320785753428936,
0.11163925379514694,
0.03938208520412445,
0.02903309278190136,
0.07998000830411911,
0.001613617641851306,
0.0476076602935791,
0.05143759399652481,
-0.12446749210357666,
0.02909277193248272,
-0.05276395007967949,
-0.17986173927783966,
-0.042345430701971054,
-0.06872647255659103,
0.12603287398815155,
0.007533930707722902,
0.1708345115184784,
-0.012266214936971664,
0.0863659530878067,
-0.03828858956694603,
-0.011553848162293434,
-0.020341526716947556,
-0.20479705929756165,
-0.041985295712947845,
-0.050778187811374664,
0.007434189785271883,
-0.004249553196132183,
0.2224796861410141,
0.037340112030506134,
0.009118014015257359,
0.03802461549639702,
0.05996206775307655,
-0.0015094446716830134,
0.03855390474200249,
0.17918336391448975,
0.10312673449516296,
-0.04137527942657471,
-0.06405975669622421,
0.06956052780151367,
0.015424875542521477,
-0.045160602778196335,
0.0918571874499321,
0.06413926184177399,
-0.04401709884405136,
0.053807806223630905,
-0.003343704156577587,
0.027850152924656868,
-0.17438124120235443,
-0.14930444955825806,
-0.06135080009698868,
0.06310692429542542,
0.0377434678375721,
0.039659250527620316,
0.09506309777498245,
-0.02213427424430847,
0.04822809621691704,
-0.025164026767015457,
-0.04458346590399742,
-0.18628929555416107,
-0.12503530085086823,
-0.09636568278074265,
-0.10704224556684494,
0.00881970301270485,
-0.050167422741651535,
-0.03216883912682533,
0.10128732025623322,
0.04864560812711716,
-0.030861804261803627,
0.08754800260066986,
-0.00662412540987134,
-0.019116707146167755,
0.039133235812187195,
-0.022340750321745872,
-0.013549264520406723,
0.006568610668182373,
-0.0282303337007761,
-0.15163007378578186,
0.003921902272850275,
-0.046354990452528,
0.0020583050791174173,
-0.05757782235741615,
0.00981829222291708,
-0.12028232961893082,
-0.11447007954120636,
-0.02437315136194229,
0.04009396582841873,
-0.0847872719168663,
0.08624154329299927,
-0.00047820995678193867,
0.030567729845643044,
0.029093056917190552,
0.13849371671676636,
-0.06428845226764679,
-0.0643339455127716,
-0.057413212954998016,
0.2665606141090393,
0.07443614304065704,
0.11414091289043427,
0.017311030998826027,
0.008330088108778,
-0.0744415819644928,
0.27171096205711365,
0.24867983162403107,
-0.028929421678185463,
0.05582727864384651,
0.028896434232592583,
0.012948180548846722,
0.08476348966360092,
0.14009889960289001,
0.08254455775022507,
0.23512734472751617,
-0.0638090968132019,
-0.05845903977751732,
-0.037154510617256165,
-0.02417687699198723,
-0.1276051104068756,
0.0327400341629982,
0.04639188200235367,
-0.03432612121105194,
-0.08002371340990067,
0.08128068596124649,
-0.18061670660972595,
0.16302470862865448,
0.067081518471241,
-0.17339123785495758,
-0.06978224962949753,
-0.022737106308341026,
0.1390104442834854,
-0.01008431427180767,
0.06503362208604813,
-0.03611158952116966,
-0.09744249284267426,
0.04643089696764946,
0.008959464728832245,
-0.20362509787082672,
-0.026530101895332336,
0.07255852967500687,
-0.010322598740458488,
0.032567765563726425,
-0.027229296043515205,
0.04711361974477768,
0.07436743378639221,
0.08170277625322342,
-0.044838204979896545,
0.03216740861535072,
0.0020284915808588266,
-0.06917048245668411,
-0.00730140320956707,
0.015407062135636806,
0.013329383917152882,
-0.05252155661582947,
0.03724842891097069,
-0.18574868142604828,
0.03276543319225311,
-0.06670940667390823,
-0.028469715267419815,
-0.020635202527046204,
0.025576474145054817,
-0.03208548575639725,
0.055958010256290436,
0.07370726764202118,
0.013845095410943031,
-0.02977273054420948,
-0.05999566614627838,
-0.021721841767430305,
0.02047201246023178,
-0.12231554090976715,
-0.143850177526474,
-0.09623750299215317,
-0.06012764200568199,
0.07081804424524307,
-0.008848750963807106,
-0.05329444259405136,
-0.03402802348136902,
-0.09715230017900467,
0.02767670713365078,
-0.14826618134975433,
0.07919323444366455,
0.03656965494155884,
0.039436060935258865,
-0.007654102519154549,
-0.01974954642355442,
0.01355569064617157,
0.0562899075448513,
-0.13239650428295135,
-0.08947428315877914
] |
null | null |
transformers
|
## A conversational agent with many personalities (PersonaGPT)
PersonaGPT is an open-domain conversational agent designed to do 2 tasks:
1. decoding _personalized_ responses based on input personality facts (the "persona" profile of the bot).
2. incorporating _turn-level goals_ into its responses through "action codes" (e.g., "talk about work", "ask about favorite music").
It builds on the [DialoGPT-medium](https://huggingface.co/microsoft/DialoGPT-medium) pretrained model based on the [GPT-2](https://github.com/openai/gpt-2) architecture.
This model is trained on the [Persona-Chat](https://arxiv.org/pdf/1801.07243) dataset, with added special tokens to better distinguish between conversational history and personality traits for dyadic conversations. Furthermore, some active learning was used to train the model to do _controlled_ decoding using turn-level goals.
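Concretely, the persona facts and the turn-level action codes are packed into short special-token prefixes placed in front of the flattened dialogue history. A rough sketch of the two prefix layouts (the token strings are taken from the decoding snippets below; in those snippets each persona fact is additionally terminated with the tokenizer's EOS token):
```python
# Prefix for personalized decoding: persona facts, a separator, then a start-of-response marker.
persona_facts = ["i am 64 years old but feel quite young.",
                 "i try to eat healthy but limit mcdonalds to once a week."]
persona_prefix = ''.join(['<|p2|>'] + persona_facts + ['<|sep|>'] + ['<|start|>'])

# Prefix for controlled decoding: a turn-level action code takes the place of the persona facts.
action = 'talk about work.'
action_prefix = ''.join(['<|act|> '] + [action] + ['<|p1|>'] + ['<|sep|>'] + ['<|start|>'])

print(persona_prefix)
print(action_prefix)
```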
## Full Repo
Preprocessing, training and implementation details can be found in the [personaGPT repo](https://github.com/af1tang/personaGPT).
### How to Use
1. Load the model and define some helper functions.
```python
from transformers import GPT2Tokenizer, GPT2LMHeadModel
import torch
tokenizer = GPT2Tokenizer.from_pretrained("af1tang/personaGPT")
model = GPT2LMHeadModel.from_pretrained("af1tang/personaGPT")
if torch.cuda.is_available():
model = model.cuda()
## utility functions ##
flatten = lambda l: [item for sublist in l for item in sublist]
def to_data(x):
if torch.cuda.is_available():
x = x.cpu()
return x.data.numpy()
def to_var(x):
if not torch.is_tensor(x):
x = torch.Tensor(x)
if torch.cuda.is_available():
x = x.cuda()
return x
def display_dialog_history(dialog_hx):
for j, line in enumerate(dialog_hx):
msg = tokenizer.decode(line)
if j %2 == 0:
print(">> User: "+ msg)
else:
print("Bot: "+msg)
print()
def generate_next(bot_input_ids, do_sample=True, top_k=10, top_p=.92,
                  max_length=1000, pad_token=tokenizer.eos_token_id):
    # sample a continuation, then keep only the newly generated tokens
    full_msg = model.generate(bot_input_ids, do_sample=do_sample,
                              top_k=top_k, top_p=top_p,
                              max_length=max_length, pad_token_id=pad_token)
    msg = to_data(full_msg.detach()[0])[bot_input_ids.shape[-1]:]
    return msg
```
2. Give your chatbot partner a set of personalities.
```python
# get personality facts for conversation
personas = []
for i in range(3):
response = input(">> Fact %d: "%(i+1))+ tokenizer.eos_token
personas.append(response)
personas = tokenizer.encode(''.join(['<|p2|>'] + personas + ['<|sep|>'] + ['<|start|>']))
```
3. The first use of PersonaGPT is to do _personalized_ dialog generation. Use the following loop to interact with the model.
```python
# converse for 8 turns
dialog_hx = []
for step in range(8):
# encode the user input
user_inp = tokenizer.encode(input(">> User: ") + tokenizer.eos_token)
# append to the chat history
dialog_hx.append(user_inp)
    # generate a response while limiting the total chat history to 1000 tokens
bot_input_ids = to_var([personas + flatten(dialog_hx)]).long()
msg = generate_next(bot_input_ids)
dialog_hx.append(msg)
print("Bot: {}".format(tokenizer.decode(msg, skip_special_tokens=True)))
```
Example of personalized decoding:
| | Persona Facts |
|---|---|
| 1 | i am 64 years old but feel quite young. |
| 2 | i try to eat healthy but limit mcdonalds to once a week. |
| 3 | i regret working as a doctor for the last 20 years. |
| 4 | my secret hobby is making self-help youtube videos. |
| 5 | i've been to spain many times and wish i could go again. |
| Role | Response|
|---|---|
| Human | yo whats good fam? |
| Bot | hello, how are you doing? |
| Human | not bad what you doing today?|
| Bot | eating a lot healthier now that i'm in my 60s |
| Human | i'm more of a fast food guy |
| Bot | me too but i'm very healthy so i try to avoid it |
| Human | what do you do for fun? |
| Bot | i make videos for fun and eat very healthy |
| Human | nice what kind of videos? |
| Bot | i love making videos about self help stuff |
| Human | i think i need some help |
| Bot | what kind of help would you like to get |
| Human | i'm 21 and feel lost in life |
| Bot | i can relate, i'm only 65 |
4. The second use of PersonaGPT is to do _controlled response generation_. Use the following program to interact with the model.
```python
## available actions ##
action_space = [ 'ask about kids.', "ask about pets.", 'talk about work.',
'ask about marital status.', 'talk about travel.', 'ask about age and gender.',
'ask about hobbies.', 'ask about favorite food.', 'talk about movies.',
'talk about music.', 'talk about politics.']
# converse for 8 turns
dialog_hx = []
for step in range(8):
# choose an action
act = None
while act not in action_space:
display_dialog_history(dialog_hx)
print()
print(" actions: ")
for k,v in enumerate(action_space): print(k,v)
try:
act = action_space[int(input(" input [0-10]: " ))]
except:
act = None
print()
# format into prefix code
action_prefix = tokenizer.encode(''.join(['<|act|> '] + [act] + ['<|p1|>'] + [] + ['<|sep|>'] + ['<|start|>']))
bot_input_ids = to_var([action_prefix + flatten(dialog_hx)]).long()
# generate query conditioned on action
    msg = generate_next(bot_input_ids)
dialog_hx.append(msg)
# generate bot response
bot_input_ids = to_var([personas+ flatten(dialog_hx)]).long()
    msg = generate_next(bot_input_ids)
dialog_hx.append(msg)
display_dialog_history(dialog_hx)
```
Example of controlled response generation:
| | Bot 2's Persona Facts |
|---|---|
| 1 | i am a quiet engineer. |
| 2 | i'm single and am looking for love. |
| 3 | sadly, i don't have any relatable hobbies. |
| 4 | luckily, however, i am tall and athletic. |
| 5 | on friday nights, i watch re-runs of the simpsons alone. |
| Role | Action | Response |
|---|---|---|
| Bot 1 | talk about movies | hello do you like movies? |
| Bot 2 | - | i love the simpsons. how about you? |
| Bot 1 | talk about music | i like the walking dead. do you play an instrument? |
| Bot 2 | - | yes i play the violin. |
| Bot 1 | ask about marital status | are you married or single? |
| Bot 2 | - | i am single but i am looking for love. |
| Bot 1 | talk about work | my job is working in a factory. what's your occupation? |
| Bot 2 | - | engineer. i'm very quiet so no one hears me. |
| Bot 1 | talk about hobbies | do you have any hobbies? |
| Bot 2 | - | i watch reruns of the simpsons. |
| Bot 1 | ask about favorite food | what's your favorite food? |
| Bot 2 | - | i love pizza. how about yourself? |
| Bot 1 | ask about pets | i also love pizza. do you like animals? |
| Bot 2 | - | i have two dogs. what is your occupation? |
| Bot 1 | talk about work | i'm a factory worker. what's your dream job? |
| Bot 2 | - | i'd love to be a writer one day. |
|
{"license": "gpl-3.0", "tags": ["conversational"]}
|
text-generation
|
af1tang/personaGPT
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"arxiv:1801.07243",
"license:gpl-3.0",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1801.07243"
] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #arxiv-1801.07243 #license-gpl-3.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
A conversational agent with many personalities (PersonaGPT)
-----------------------------------------------------------
PersonaGPT is an open-domain conversational agent designed to do 2 tasks:
1. decoding *personalized* responses based on input personality facts (the "persona" profile of the bot).
2. incorporating *turn-level goals* into its responses through "action codes" (e.g., "talk about work", "ask about favorite music").
It builds on the DialoGPT-medium pretrained model based on the GPT-2 architecture.
This model is trained on the Persona-Chat dataset, with added special tokens to better distinguish between conversational history and personality traits for dyadic conversations. Furthermore, some active learning was used to train the model to do *controlled* decoding using turn-level goals.
Full Repo
---------
Preprocessing, training and implementation details can be found in the personaGPT repo.
### How to Use
1. Load the model and define some helper functions.
2. Give your chatbot partner a set of personalities.
3. The first use of PersonaGPT is to do *personalized* dialog generation. Use the following loop to interact with the model.
Example of personalized decoding:
4. The second use of PersonaGPT is to do *controlled response generation*. Use the following program to interact with the model.
Example of controlled response generation:
Role: Bot 1, Action: talk about movies, Response: hello do you like movies?
Role: Bot 2, Action: -, Response: i love the simpsons. how about you?
Role: Bot 1, Action: talk about music, Response: i like the walking dead. do you play an instrument?
Role: Bot 2, Action: -, Response: yes i play the violin.
Role: Bot 1, Action: ask about marital status, Response: are you married or single?
Role: Bot 2, Action: -, Response: i am single but i am looking for love.
Role: Bot 1, Action: talk about work, Response: my job is working in a factory. what's your occupation?
Role: Bot 2, Action: -, Response: engineer. i'm very quiet so no one hears me.
Role: Bot 1, Action: talk about hobbies, Response: do you have any hobbies?
Role: Bot 2, Action: -, Response: i watch reruns of the simpsons.
Role: Bot 1, Action: ask about favorite food, Response: what's your favorite food?
Role: Bot 2, Action: -, Response: i love pizza. how about yourself?
Role: Bot 1, Action: ask about pets, Response: i also love pizza. do you like animals?
Role: Bot 2, Action: -, Response: i have two dogs. what is your occupation?
Role: Bot 1, Action: talk about work, Response: i'm a factory worker. what's your dream job?
Role: Bot 2, Action: -, Response: i'd love to be a writer one day.
|
[
"### How to Use\n\n\n1. Load the model and define some helper functions.\n2. Give your chatbot partner a set of personalities.\n3. The first use of PersonaGPT is to do *personalized* dialog generation. Use the following loop to interact with the model.\n\n\nExample of personalized decoding:\n\n\n\n\n4. The second use of PersonaGPT is to do *controlled response generation*. Use the following program to interact with the model.\n\n\nExample of controlled response generation:\n\n\n\nRole: Bot 1, Action: talk about movies, Response: hello do you like movies?\nRole: Bot 2, Action: -, Response: i love the simpsons. how about you?\nRole: Bot 1, Action: talk about music, Response: i like the walking dead. do you play an instrument?\nRole: Bot 2, Action: -, Response: yes i play the violin.\nRole: Bot 1, Action: ask about marital status, Response: are you married or single?\nRole: Bot 2, Action: -, Response: i am single but i am looking for love.\nRole: Bot 1, Action: talk about work, Response: my job is working in a factory. what's your occupation?\nRole: Bot 2, Action: -, Response: engineer. i'm very quiet so no one hears me.\nRole: Bot 1, Action: talk about hobbies, Response: do you have any hobbies?\nRole: Bot 2, Action: -, Response: i watch reruns of the simpsons.\nRole: Bot 1, Action: ask about favorite food, Response: what's your favorite food?\nRole: Bot 2, Action: -, Response: i love pizza. how about yourself?\nRole: Bot 1, Action: ask about pets, Response: i also love pizza. do you like animals?\nRole: Bot 2, Action: -, Response: i have two dogs. what is your occupation?\nRole: Bot 1, Action: talk about work, Response: i'm a factory worker. what's your dream job?\nRole: Bot 2, Action: -, Response: i'd love to be a writer one day."
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #arxiv-1801.07243 #license-gpl-3.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n",
"### How to Use\n\n\n1. Load the model and define some helper functions.\n2. Give your chatbot partner a set of personalities.\n3. The first use of PersonaGPT is to do *personalized* dialog generation. Use the following loop to interact with the model.\n\n\nExample of personalized decoding:\n\n\n\n\n4. The second use of PersonaGPT is to do *controlled response generation*. Use the following program to interact with the model.\n\n\nExample of controlled response generation:\n\n\n\nRole: Bot 1, Action: talk about movies, Response: hello do you like movies?\nRole: Bot 2, Action: -, Response: i love the simpsons. how about you?\nRole: Bot 1, Action: talk about music, Response: i like the walking dead. do you play an instrument?\nRole: Bot 2, Action: -, Response: yes i play the violin.\nRole: Bot 1, Action: ask about marital status, Response: are you married or single?\nRole: Bot 2, Action: -, Response: i am single but i am looking for love.\nRole: Bot 1, Action: talk about work, Response: my job is working in a factory. what's your occupation?\nRole: Bot 2, Action: -, Response: engineer. i'm very quiet so no one hears me.\nRole: Bot 1, Action: talk about hobbies, Response: do you have any hobbies?\nRole: Bot 2, Action: -, Response: i watch reruns of the simpsons.\nRole: Bot 1, Action: ask about favorite food, Response: what's your favorite food?\nRole: Bot 2, Action: -, Response: i love pizza. how about yourself?\nRole: Bot 1, Action: ask about pets, Response: i also love pizza. do you like animals?\nRole: Bot 2, Action: -, Response: i have two dogs. what is your occupation?\nRole: Bot 1, Action: talk about work, Response: i'm a factory worker. what's your dream job?\nRole: Bot 2, Action: -, Response: i'd love to be a writer one day."
] |
[
72,
484
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #arxiv-1801.07243 #license-gpl-3.0 #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n"
] |
[
-0.008690566755831242,
0.07194530218839645,
-0.004417704418301582,
0.04941359907388687,
0.12068691104650497,
0.006080517079681158,
0.12151848524808884,
0.14581744372844696,
-0.022702211514115334,
-0.004843829199671745,
0.18134050071239471,
0.1885480433702469,
-0.0038946426939219236,
0.0846971943974495,
-0.06588372588157654,
-0.239728644490242,
0.050003066658973694,
0.049471694976091385,
0.03709102049469948,
0.11075831949710846,
0.1127287745475769,
-0.05230918526649475,
0.0958772674202919,
0.0026519133243709803,
-0.15265898406505585,
0.0037592530716210604,
0.055317915976047516,
-0.12337624281644821,
0.1195201501250267,
0.06058396026492119,
0.04403403401374817,
0.05478466674685478,
-0.03823859244585037,
-0.13641798496246338,
0.018722649663686752,
0.008677272126078606,
-0.08094105124473572,
0.07358956336975098,
0.04489816352725029,
-0.04733557999134064,
0.15245181322097778,
0.04245875030755997,
-0.028754398226737976,
0.06610436737537384,
-0.15495356917381287,
-0.10522020608186722,
-0.0851815864443779,
0.08619382232427597,
0.01227040309458971,
0.10189670324325562,
-0.007483647204935551,
0.12792493402957916,
-0.05204634368419647,
0.08602108806371689,
0.19085265696048737,
-0.4110372066497803,
-0.004268493503332138,
0.14677441120147705,
0.07320063561201096,
0.03552599623799324,
-0.03990459814667702,
0.0808083787560463,
0.04622182250022888,
0.007427984848618507,
0.01984826847910881,
-0.06545594334602356,
-0.10558710247278214,
0.06694266945123672,
-0.08671257644891739,
-0.05969112738966942,
0.2514026463031769,
-0.02212703786790371,
0.04264618083834648,
-0.06156178563833237,
-0.0689501091837883,
-0.0773315578699112,
0.0005616866983473301,
0.021839357912540436,
-0.032081492245197296,
0.06904751062393188,
0.012878823094069958,
-0.07662426680326462,
-0.1537509262561798,
-0.023915743455290794,
-0.18864472210407257,
0.12863677740097046,
-0.0059403288178145885,
0.06385774910449982,
-0.13960590958595276,
0.0903351679444313,
-0.06322117149829865,
-0.08837391436100006,
-0.0034873492550104856,
-0.08393850177526474,
0.08864171802997589,
0.03144732117652893,
-0.059277985244989395,
-0.006039605475962162,
0.07179979979991913,
0.1385709047317505,
-0.03526657074689865,
0.0011026653228327632,
-0.011256898753345013,
0.10390201956033707,
-0.014120837673544884,
0.09895795583724976,
-0.012964322231709957,
-0.006483737379312515,
0.043371669948101044,
-0.1048528328537941,
0.04996654391288757,
-0.06028443202376366,
-0.1987142115831375,
-0.0461287684738636,
-0.012452606111764908,
0.061019476503133774,
0.040770016610622406,
0.11245657503604889,
-0.03700665757060051,
0.012649076990783215,
0.09887390583753586,
-0.038981109857559204,
0.023519448935985565,
-0.004818851128220558,
0.016263874247670174,
0.04116776958107948,
0.04184390604496002,
0.03795432671904564,
-0.08281952142715454,
0.030154526233673096,
-0.0726931169629097,
0.005687651690095663,
-0.038429636508226395,
-0.0694945827126503,
0.05055205896496773,
-0.061902277171611786,
-0.00296788034029305,
-0.17195814847946167,
-0.11015378683805466,
0.016225595027208328,
0.003733911318704486,
-0.04827193170785904,
-0.09031306207180023,
0.005224693566560745,
-0.03365635126829147,
0.07542777806520462,
-0.0824563279747963,
0.005025808699429035,
-0.0659351572394371,
0.08965878933668137,
-0.039823781698942184,
0.08747480064630508,
-0.17073722183704376,
0.06735701113939285,
-0.07857753336429596,
-0.01702527329325676,
-0.025089669972658157,
0.01946980319917202,
-0.03898533061146736,
0.07125400006771088,
-0.03091159649193287,
-0.04092993214726448,
-0.043433934450149536,
0.03895539417862892,
-0.006589878350496292,
0.1537022739648819,
-0.07164279371500015,
-0.06239889934659004,
0.21152247488498688,
-0.05719909444451332,
-0.15480349957942963,
0.09150703251361847,
0.019601624459028244,
0.016928136348724365,
0.05786014720797539,
0.2127470225095749,
-0.029166070744395256,
-0.07616051286458969,
0.037154387682676315,
0.12817394733428955,
-0.099916473031044,
-0.0913759246468544,
0.059661123901605606,
-0.05334577336907387,
-0.058779314160346985,
0.038035694509744644,
0.037890542298555374,
0.06695100665092468,
-0.0289117731153965,
-0.04208744689822197,
-0.05375334993004799,
-0.0024643694050610065,
0.06601298600435257,
-0.012708907946944237,
0.08051051944494247,
-0.08701783418655396,
-0.07263477146625519,
-0.014051515609025955,
0.011754353530704975,
0.008319665677845478,
0.05002757906913757,
-0.035912878811359406,
0.11368291079998016,
-0.0013884243089705706,
0.0301576629281044,
-0.12046635895967484,
-0.041007429361343384,
-0.03588216006755829,
0.12854018807411194,
0.04728425666689873,
0.12455129623413086,
0.04628319665789604,
-0.04977821558713913,
-0.003522018436342478,
0.025865310803055763,
0.10944467037916183,
0.019342485815286636,
-0.06576069444417953,
-0.07025045156478882,
0.051936157047748566,
-0.04572467878460884,
0.03423347696661949,
-0.06050444766879082,
0.01863381639122963,
0.0650615468621254,
0.07795849442481995,
-0.03654610365629196,
0.047514259815216064,
-0.01103799045085907,
0.008682009764015675,
-0.06856340169906616,
-0.0035710264928638935,
0.1130223497748375,
0.024739671498537064,
-0.06204712390899658,
0.24925342202186584,
-0.18441173434257507,
0.23108936846256256,
0.211004838347435,
-0.23219765722751617,
0.0009957685833796859,
-0.066445492208004,
-0.03553330525755882,
0.018517034128308296,
0.0378296785056591,
-0.027679746970534325,
0.10330864042043686,
-0.03181558474898338,
0.17456097900867462,
-0.06141113117337227,
-0.025417698547244072,
-0.0020065060816705227,
-0.07242728769779205,
-0.031123913824558258,
0.08012943714857101,
0.09842628240585327,
-0.14622537791728973,
0.19528982043266296,
0.226418599486351,
0.020781811326742172,
0.15347334742546082,
0.02489813230931759,
0.008800390176475048,
0.02302411012351513,
-0.012340796180069447,
-0.04356665536761284,
-0.01931913197040558,
-0.19261065125465393,
-0.03394746780395508,
0.09858736395835876,
0.035729460418224335,
0.08115196228027344,
-0.14610418677330017,
-0.0649474710226059,
0.0044101933017373085,
-0.03285824507474899,
-0.029940608888864517,
0.12054029107093811,
0.021908991038799286,
0.1405881643295288,
0.01430258434265852,
-0.03553639352321625,
0.10564035922288895,
0.03659738972783089,
-0.09493425488471985,
0.15892146527767181,
-0.14028558135032654,
-0.3039955198764801,
-0.13869091868400574,
-0.164347842335701,
-0.06629711389541626,
0.04137459024786949,
0.14154201745986938,
-0.07723183184862137,
-0.03486129269003868,
-0.022523459047079086,
0.016445217654109,
-0.09952794760465622,
-0.001845228485763073,
-0.043941911309957504,
0.008668116293847561,
-0.06074739620089531,
-0.12972302734851837,
-0.07345546036958694,
-0.019559543579816818,
-0.07058613002300262,
0.11421611160039902,
-0.04657519981265068,
0.06966380774974823,
0.1657983660697937,
0.0034091651905328035,
0.04962208867073059,
-0.054470494389534,
0.1943134069442749,
-0.05843600630760193,
-0.013555923476815224,
0.22249765694141388,
0.015151601284742355,
0.07650262117385864,
0.12562167644500732,
0.02028399147093296,
-0.05720144137740135,
0.006185542792081833,
-0.04584887996315956,
-0.08548057079315186,
-0.22460448741912842,
-0.12083631008863449,
-0.12668974697589874,
0.07584039866924286,
0.03994152322411537,
0.06979823857545853,
0.139553040266037,
0.06427723169326782,
-0.022427385672926903,
0.018163304775953293,
0.010771515779197216,
0.10438153892755508,
0.29452773928642273,
-0.059475623071193695,
0.1530797779560089,
-0.07493390887975693,
-0.07440414279699326,
0.10050858557224274,
0.07391800731420517,
0.10954450070858002,
0.0742572620511055,
0.07037979364395142,
0.05084134265780449,
0.10976248979568481,
0.11262897402048111,
0.04703579470515251,
0.025155864655971527,
-0.042882371693849564,
-0.03704727441072464,
-0.041908301413059235,
-0.023688877001404762,
0.0626545175909996,
0.025185368955135345,
-0.1538403332233429,
-0.030207982286810875,
-0.11305759847164154,
0.06911452114582062,
0.06450407207012177,
0.07624505460262299,
-0.19431741535663605,
-0.008586905896663666,
0.07186315208673477,
0.013721801340579987,
-0.10844298452138901,
0.07629746943712234,
0.0049170805141329765,
-0.1194683238863945,
0.057260662317276,
-0.02442917786538601,
0.10913687944412231,
0.00464162090793252,
0.06822676211595535,
-0.05742624029517174,
-0.08248724043369293,
0.02037825994193554,
0.12786202132701874,
-0.303471177816391,
0.24589227139949799,
0.0015310011804103851,
-0.06340346485376358,
-0.09178931266069412,
-0.0009344600257463753,
0.004465441685169935,
0.11426690965890884,
0.11332479119300842,
0.012217901647090912,
-0.05494430288672447,
-0.057125598192214966,
-0.006432782392948866,
0.017388882115483284,
0.0813741385936737,
0.01551913283765316,
-0.03679492697119713,
-0.0542081855237484,
0.007287231739610434,
0.004778450820595026,
0.05100181698799133,
-0.011568610556423664,
-0.18260374665260315,
0.09671914577484131,
0.10871865600347519,
0.04020951688289642,
-0.004901536740362644,
-0.020884312689304352,
-0.16997317969799042,
0.24674467742443085,
-0.05823211371898651,
-0.08417320251464844,
-0.10397377610206604,
-0.0655067190527916,
0.03269723802804947,
-0.05731992423534393,
0.05481554940342903,
-0.08275917172431946,
-0.0028047487139701843,
-0.08220803737640381,
-0.17338235676288605,
0.1296451985836029,
-0.08546409010887146,
-0.0506913922727108,
-0.013538715429604053,
0.16507211327552795,
-0.11089038848876953,
0.0318836010992527,
0.025826051831245422,
0.05243249610066414,
-0.15104907751083374,
-0.12398919463157654,
0.04304583743214607,
-0.0131941894069314,
0.09193605929613113,
-0.03570229932665825,
-0.050865162163972855,
-0.02554541453719139,
0.00684686005115509,
-0.04732108488678932,
0.2998587191104889,
0.2128366380929947,
-0.12065640091896057,
0.18924234807491302,
0.08118943870067596,
-0.07509469240903854,
-0.3036978840827942,
-0.10274189710617065,
-0.12076403200626373,
-0.050952035933732986,
-0.019733304157853127,
-0.12320701777935028,
0.01391171757131815,
0.031121395528316498,
-0.061988573521375656,
0.11012183874845505,
-0.2284049540758133,
-0.10564126819372177,
0.14493191242218018,
-0.043179143220186234,
0.3532416820526123,
-0.14080393314361572,
-0.0862128958106041,
-0.03924451023340225,
-0.21369630098342896,
0.16290128231048584,
-0.04223935306072235,
0.11990125477313995,
-0.036637261509895325,
0.11960306018590927,
0.028483152389526367,
-0.05051538348197937,
0.11769857257604599,
-0.027110103517770767,
-0.0006365728913806379,
-0.13781709969043732,
-0.07924327999353409,
0.050275664776563644,
-0.0075717042200267315,
0.06170440837740898,
-0.11156953126192093,
0.022849490866065025,
-0.13431857526302338,
-0.022308990359306335,
-0.09116796404123306,
0.06593615561723709,
0.008708554320037365,
-0.06692474335432053,
-0.0649685263633728,
-0.049817390739917755,
-0.015378039330244064,
-0.0099040437489748,
0.2238103151321411,
-0.06410696357488632,
0.16343893110752106,
0.15725845098495483,
0.0769408717751503,
-0.16578321158885956,
-0.027238622307777405,
-0.048724204301834106,
-0.07842439413070679,
0.06681810319423676,
-0.1660337746143341,
0.025037623941898346,
0.09866871684789658,
-0.03202575072646141,
0.08365576714277267,
0.08698645234107971,
0.006199948955327272,
0.002370285801589489,
0.12259235233068466,
-0.22498644888401031,
-0.024865049868822098,
-0.055965982377529144,
0.05291168764233589,
0.0856119841337204,
0.07060597091913223,
0.16861213743686676,
-0.01395485084503889,
-0.03507225215435028,
0.010860729962587357,
0.026713931933045387,
-0.06401123851537704,
0.0257263146340847,
0.033121611922979355,
0.017947912216186523,
-0.12382996827363968,
0.049643270671367645,
0.025804290547966957,
-0.12209835648536682,
0.026988763362169266,
0.12898097932338715,
-0.10827729851007462,
-0.13835111260414124,
-0.06996531039476395,
0.001786443404853344,
-0.1701311469078064,
-0.03399040922522545,
-0.015142773278057575,
-0.12305241823196411,
0.06773132085800171,
0.08777289092540741,
0.061336617916822433,
0.06492724269628525,
-0.024378255009651184,
-0.04453715682029724,
-0.002181921387091279,
-0.01557866856455803,
-0.06416509300470352,
0.025227244943380356,
-0.0884678065776825,
0.07496137171983719,
-0.010558967478573322,
0.1192178949713707,
-0.06648924201726913,
-0.038353871554136276,
-0.15047486126422882,
0.0046208444982767105,
-0.08698351681232452,
-0.06785193085670471,
-0.08730873465538025,
-0.03884764015674591,
-0.0031193855684250593,
-0.05118608474731445,
-0.05738477408885956,
-0.012052464298903942,
-0.11672914773225784,
-0.00382249616086483,
-0.038912586867809296,
0.051755741238594055,
-0.10049466043710709,
-0.009956011548638344,
0.05402730777859688,
0.0026648524217307568,
0.13081301748752594,
0.09054920822381973,
-0.07682273536920547,
0.04904286935925484,
-0.16423024237155914,
-0.07113972306251526,
0.08693711459636688,
0.01390333566814661,
0.034647200256586075,
0.037191566079854965,
0.006623726338148117,
0.07683868706226349,
0.0248662531375885,
0.05033007264137268,
0.025127852335572243,
-0.1251719892024994,
0.0265251025557518,
-0.04545216262340546,
-0.11951258033514023,
-0.034394629299640656,
-0.03651782125234604,
0.05564713478088379,
0.015384029597043991,
0.10572615265846252,
-0.04994286969304085,
0.04716247692704201,
-0.07690117508172989,
0.03467411920428276,
-0.009527561254799366,
-0.15122322738170624,
-0.07876892387866974,
-0.07606935501098633,
0.01710676960647106,
0.0071780201978981495,
0.2676020562648773,
0.07101750373840332,
-0.09015047550201416,
0.057330500334501266,
0.09721370786428452,
0.030257664620876312,
-0.018506791442632675,
0.19801223278045654,
0.059272464364767075,
-0.032282013446092606,
-0.09453605115413666,
0.0794183760881424,
0.0065530212596058846,
-0.004970034584403038,
0.1322980374097824,
0.04387211799621582,
0.027479002252221107,
0.07094453275203705,
0.03238515555858612,
-0.04650556296110153,
-0.12640222907066345,
-0.1034957766532898,
-0.01576213166117668,
0.08657126128673553,
-0.08658164739608765,
0.07380014657974243,
0.1698591262102127,
-0.0390390045940876,
0.015428908169269562,
-0.06582784652709961,
-0.029396625235676765,
-0.1619492471218109,
-0.14894084632396698,
-0.07489679008722305,
-0.15783727169036865,
0.006639527622610331,
-0.0767245814204216,
0.10034891217947006,
0.06081444397568703,
0.049003589898347855,
-0.052046410739421844,
0.01930910535156727,
-0.030204102396965027,
-0.09006756544113159,
0.025300955399870872,
-0.022928765043616295,
0.07442862540483475,
-0.06665629148483276,
-0.004863648675382137,
-0.06085609644651413,
-0.0338258296251297,
0.014665859751403332,
0.06747269630432129,
0.0075838081538677216,
0.004287927411496639,
-0.11521928012371063,
-0.06297034025192261,
-0.05488716810941696,
0.08591684699058533,
0.02296573668718338,
0.18413661420345306,
0.00020516134100034833,
0.006344030145555735,
0.03741433098912239,
0.2175363302230835,
-0.07661324739456177,
-0.07041408866643906,
-0.02765081450343132,
0.1867961436510086,
0.024921124801039696,
0.08651364594697952,
-0.038640622049570084,
-0.0007020952762104571,
-0.05975870043039322,
0.31570708751678467,
0.33403822779655457,
-0.05891024321317673,
0.02893299236893654,
0.0058679222129285336,
0.031160995364189148,
0.11307363957166672,
0.1129157766699791,
0.11177149415016174,
0.27051231265068054,
-0.07806956022977829,
-0.03832538053393364,
-0.040070049464702606,
0.021706989035010338,
-0.10798019915819168,
0.0958726555109024,
0.010081716813147068,
-0.08849462866783142,
-0.011790183372795582,
0.06433829665184021,
-0.14688032865524292,
0.12778502702713013,
-0.07157599180936813,
-0.19744472205638885,
-0.047352056950330734,
0.043024659156799316,
0.1419718712568283,
-0.00368441641330719,
0.08214427530765533,
-0.021198708564043045,
-0.0679006576538086,
0.06625082343816757,
0.005959674715995789,
-0.2184535562992096,
0.010541243478655815,
0.07161442935466766,
-0.02849164791405201,
0.05115227401256561,
-0.021629348397254944,
0.08958473801612854,
0.09909144788980484,
0.05983691290020943,
-0.06989958137273788,
0.07586127519607544,
0.009918290190398693,
-0.07800696045160294,
0.023387635126709938,
-0.04721249267458916,
0.009029122069478035,
-0.08009070158004761,
0.05598518252372742,
-0.12861661612987518,
0.05142996832728386,
0.023332180455327034,
-0.03352508321404457,
-0.02774852141737938,
0.009748940356075764,
-0.06615689396858215,
0.08873233944177628,
0.05273972824215889,
-0.022407453507184982,
-0.05732666701078415,
-0.048325784504413605,
-0.04511452466249466,
0.0037717591039836407,
-0.10574802756309509,
-0.07864590734243393,
-0.09114353358745575,
-0.0569598563015461,
0.08744558691978455,
0.013108116574585438,
-0.1767115294933319,
-0.010714259929955006,
-0.07416227459907532,
0.057913586497306824,
-0.1387699544429779,
0.0513220950961113,
0.09155476838350296,
-0.014542403630912304,
-0.014018375426530838,
-0.050531864166259766,
0.042687833309173584,
0.06698276102542877,
-0.09341496229171753,
-0.07487643510103226
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# opus-mt-en-de-finetuned-en-to-de
This model is a fine-tuned version of [Helsinki-NLP/opus-mt-en-de](https://huggingface.co/Helsinki-NLP/opus-mt-en-de) on the wmt16 dataset.
It achieves the following results on the evaluation set:
- Loss: 1.6798
- Bleu: 26.4396
- Gen Len: 24.8156
## Model description
More information needed
## Intended uses & limitations
More information needed
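A minimal inference sketch for English-to-German translation with this checkpoint (the standard Marian classes are assumed from the `Helsinki-NLP/opus-mt-en-de` base model; the example sentence is arbitrary):
```python
from transformers import MarianMTModel, MarianTokenizer

model_name = "afreireosorio/opus-mt-en-de-finetuned-en-to-de"
tokenizer = MarianTokenizer.from_pretrained(model_name)
model = MarianMTModel.from_pretrained(model_name)

# Translate a small batch of English sentences into German.
batch = tokenizer(["The weather is nice today."], return_tensors="pt", padding=True)
outputs = model.generate(**batch)
print(tokenizer.batch_decode(outputs, skip_special_tokens=True))
```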
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0002
- train_batch_size: 8
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 1
- mixed_precision_training: Native AMP
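The hyperparameters above correspond roughly to the following `Seq2SeqTrainingArguments` (the output directory is an assumption; arguments not listed keep their defaults):
```python
from transformers import Seq2SeqTrainingArguments

training_args = Seq2SeqTrainingArguments(
    output_dir="opus-mt-en-de-finetuned-en-to-de",  # assumed
    learning_rate=2e-4,
    per_device_train_batch_size=8,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    num_train_epochs=1,
    fp16=True,  # Native AMP mixed-precision training
)
```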
### Training results
| Training Loss | Epoch | Step | Validation Loss | Bleu | Gen Len |
|:-------------:|:-----:|:------:|:---------------:|:-------:|:-------:|
| 2.0864 | 1.0 | 568611 | 1.6798 | 26.4396 | 24.8156 |
### Framework versions
- Transformers 4.12.5
- Pytorch 1.9.0.dev20210415+cu101
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["wmt16"], "metrics": ["bleu"], "model-index": [{"name": "opus-mt-en-de-finetuned-en-to-de", "results": [{"task": {"type": "text2text-generation", "name": "Sequence-to-sequence Language Modeling"}, "dataset": {"name": "wmt16", "type": "wmt16", "args": "de-en"}, "metrics": [{"type": "bleu", "value": 26.4396, "name": "Bleu"}]}]}]}
|
text2text-generation
|
afreireosorio/opus-mt-en-de-finetuned-en-to-de
|
[
"transformers",
"pytorch",
"tensorboard",
"marian",
"text2text-generation",
"generated_from_trainer",
"dataset:wmt16",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #marian #text2text-generation #generated_from_trainer #dataset-wmt16 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
opus-mt-en-de-finetuned-en-to-de
================================
This model is a fine-tuned version of Helsinki-NLP/opus-mt-en-de on the wmt16 dataset.
It achieves the following results on the evaluation set:
* Loss: 1.6798
* Bleu: 26.4396
* Gen Len: 24.8156
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0002
* train\_batch\_size: 8
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 1
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.12.5
* Pytorch 1.9.0.dev20210415+cu101
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0002\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.12.5\n* Pytorch 1.9.0.dev20210415+cu101\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #marian #text2text-generation #generated_from_trainer #dataset-wmt16 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0002\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.12.5\n* Pytorch 1.9.0.dev20210415+cu101\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
69,
112,
4,
39
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #marian #text2text-generation #generated_from_trainer #dataset-wmt16 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0002\n* train\\_batch\\_size: 8\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 1\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.12.5\n* Pytorch 1.9.0.dev20210415+cu101\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.1024523600935936,
0.11060822755098343,
-0.004617050290107727,
0.08965777605772018,
0.11128261685371399,
0.0029447409324347973,
0.14956974983215332,
0.15517103672027588,
-0.11950268596410751,
0.0535762719810009,
0.13070601224899292,
0.14208988845348358,
0.041838809847831726,
0.14933589100837708,
-0.052616626024246216,
-0.26501932740211487,
0.03892980515956879,
0.04848332703113556,
-0.0414278581738472,
0.12314870208501816,
0.09016139805316925,
-0.12777043879032135,
0.08535362035036087,
0.032988790422677994,
-0.17147298157215118,
-0.004682359751313925,
-0.007844340056180954,
-0.07687075436115265,
0.11277633905410767,
0.02219858020544052,
0.10021096467971802,
0.0403994619846344,
0.06411111354827881,
-0.17770670354366302,
0.00899976585060358,
0.0551336333155632,
0.009222762659192085,
0.10372016578912735,
0.0687718540430069,
-0.016741154715418816,
0.11748779565095901,
-0.07274988293647766,
0.06103285402059555,
0.028972884640097618,
-0.12422866374254227,
-0.2660285532474518,
-0.11153857409954071,
0.055507950484752655,
0.06065629795193672,
0.08480282127857208,
-0.003130444325506687,
0.16386236250400543,
-0.037367623299360275,
0.1010008454322815,
0.263681560754776,
-0.285442590713501,
-0.05446023494005203,
0.005721589084714651,
0.041524309664964676,
0.0625874325633049,
-0.07171005010604858,
-0.02151958830654621,
0.03014247864484787,
0.04411277920007706,
0.14068110287189484,
-0.007741105277091265,
-0.03510495275259018,
-0.008479969576001167,
-0.13904744386672974,
-0.06104153394699097,
0.16186299920082092,
0.03650603070855141,
-0.03833702206611633,
-0.057544514536857605,
-0.06742952018976212,
-0.17086541652679443,
-0.04287886247038841,
-0.02028515748679638,
0.05173088610172272,
-0.035055674612522125,
-0.07831643521785736,
-0.019898369908332825,
-0.07860846817493439,
-0.0462907999753952,
-0.06317350268363953,
0.13412019610404968,
0.0540740042924881,
0.021011020988225937,
-0.06047710403800011,
0.07744726538658142,
-0.04060902073979378,
-0.1508268117904663,
-0.010092520155012608,
0.012337488122284412,
0.022567225620150566,
-0.035257406532764435,
-0.03840826079249382,
-0.12280845642089844,
0.011639599688351154,
0.13788847625255585,
-0.11439201980829239,
0.07440764456987381,
-0.007706087082624435,
0.040526509284973145,
-0.07339950650930405,
0.17426297068595886,
-0.036189161241054535,
0.009665842168033123,
0.0068564084358513355,
0.05872705578804016,
0.042890530079603195,
-0.025377873331308365,
-0.10380562394857407,
0.028182754293084145,
0.10604383796453476,
0.022420665249228477,
-0.03392315283417702,
0.060427822172641754,
-0.03981267288327217,
-0.03395227715373039,
0.0381547175347805,
-0.10011306405067444,
0.03839949890971184,
-0.0026822714135050774,
-0.0793776586651802,
0.0036281286738812923,
0.026271948590874672,
0.003613948356360197,
-0.04319142922759056,
0.09021385759115219,
-0.07899424433708191,
0.02035234309732914,
-0.08647073060274124,
-0.13515475392341614,
0.034989964216947556,
-0.09042941778898239,
0.003373486455529928,
-0.08807208389043808,
-0.1551346331834793,
-0.011638297699391842,
0.059286314994096756,
-0.04566716030240059,
-0.040908463299274445,
-0.045335300266742706,
-0.08325902372598648,
0.03674976900219917,
-0.016833942383527756,
0.0794995054602623,
-0.06445123255252838,
0.07703646272420883,
0.03679082915186882,
0.07716767489910126,
-0.029809745028614998,
0.04952940344810486,
-0.08178386837244034,
0.031373847275972366,
-0.21382106840610504,
0.05263200402259827,
-0.057018719613552094,
0.06922316551208496,
-0.10989411920309067,
-0.10831692814826965,
0.005487949587404728,
-0.01831667684018612,
0.10202505439519882,
0.09681413322687149,
-0.16549815237522125,
-0.0730351135134697,
0.19521377980709076,
-0.09210366755723953,
-0.11988141387701035,
0.11808986961841583,
-0.04634319618344307,
0.016451053321361542,
0.05271811783313751,
0.2012578248977661,
0.04027426242828369,
-0.08837731182575226,
-0.00841249618679285,
-0.051285598427057266,
0.04793822392821312,
-0.048376306891441345,
0.06590788811445236,
-0.005768928676843643,
0.07307326793670654,
0.011599583551287651,
0.008665679953992367,
0.028611058369278908,
-0.0955130085349083,
-0.08070208132266998,
-0.053432539105415344,
-0.07180861383676529,
0.02452803961932659,
0.045422762632369995,
0.07087358832359314,
-0.12126942723989487,
-0.10858678072690964,
0.06296185404062271,
0.08014833182096481,
-0.07497257739305496,
0.052057307213544846,
-0.09839465469121933,
0.10499908030033112,
-0.04400493949651718,
-0.004607821349054575,
-0.17792315781116486,
-0.010897022671997547,
0.029705269262194633,
-0.03777506947517395,
0.026208927854895592,
-0.036969322711229324,
0.07420480251312256,
0.06949105113744736,
-0.03917472064495087,
-0.02282610349357128,
-0.03624830022454262,
0.005571030080318451,
-0.10376400500535965,
-0.21066756546497345,
-0.0420406311750412,
-0.03192746639251709,
0.07663731276988983,
-0.16129803657531738,
0.04847724363207817,
0.05307692289352417,
0.11220265179872513,
0.027865219861268997,
-0.02442067861557007,
-0.0132026681676507,
0.06875739991664886,
-0.04223846271634102,
-0.0655367523431778,
0.06276669353246689,
0.01729782298207283,
-0.08238266408443451,
-0.0017650624504312873,
-0.14010155200958252,
0.13429929316043854,
0.13778015971183777,
-0.047584984451532364,
-0.05327737331390381,
-0.005340846721082926,
-0.052480604499578476,
-0.03154788166284561,
-0.02755899541079998,
0.03421413525938988,
0.16253386437892914,
0.014150857925415039,
0.15041334927082062,
-0.09124674648046494,
-0.04490876570343971,
0.04556461423635483,
-0.030149757862091064,
-0.0046601677313447,
0.11109128594398499,
0.05861562862992287,
-0.09178057312965393,
0.12953506410121918,
0.13437242805957794,
-0.060232896357774734,
0.1298825889825821,
-0.060129232704639435,
-0.07034071534872055,
-0.03185281902551651,
-0.018783539533615112,
0.019182326272130013,
0.09576236456632614,
-0.13089172542095184,
-0.014676881954073906,
0.03586215898394585,
0.037904489785432816,
0.01749727874994278,
-0.1943994164466858,
-0.00008282328781206161,
0.04668605700135231,
-0.05369660258293152,
-0.043593183159828186,
-0.009447949938476086,
0.014614199288189411,
0.09727969765663147,
0.009801231324672699,
-0.05897286534309387,
0.023806899785995483,
0.00912508461624384,
-0.06834360212087631,
0.18910348415374756,
-0.09840931743383408,
-0.15668945014476776,
-0.1163964718580246,
-0.08530417084693909,
-0.05378848314285278,
-0.008696815930306911,
0.08000461012125015,
-0.09012196213006973,
-0.04432258382439613,
-0.08987751603126526,
0.009985039010643959,
-0.014344603754580021,
0.018823621794581413,
0.04342387244105339,
-0.007610805332660675,
0.07350973039865494,
-0.12288283556699753,
-0.02405601553618908,
-0.028599457815289497,
-0.006668138317763805,
0.05753869190812111,
0.023093124851584435,
0.10965880751609802,
0.13625645637512207,
-0.035271577537059784,
0.04263782128691673,
-0.03072519227862358,
0.2158782035112381,
-0.0646187886595726,
-0.01303309015929699,
0.14348764717578888,
0.0011449616868048906,
0.07692480832338333,
0.10473553836345673,
0.060340967029333115,
-0.08409272134304047,
-0.014121871441602707,
0.021617267280817032,
-0.040879033505916595,
-0.23601758480072021,
-0.03099627047777176,
-0.04080135375261307,
0.0045338585041463375,
0.09761334210634232,
0.03373463451862335,
0.054650358855724335,
0.05656662583351135,
0.02114335261285305,
0.04096537083387375,
-0.01550486870110035,
0.10550085455179214,
0.13731680810451508,
0.05438699945807457,
0.1426079273223877,
-0.051364485174417496,
-0.023156238719820976,
0.04954332113265991,
0.0037244735285639763,
0.23177552223205566,
0.007819830439984798,
0.1795416921377182,
0.06553912907838821,
0.15004894137382507,
0.017751267179846764,
0.05689150467514992,
-0.00799661222845316,
-0.014198721386492252,
-0.015499170869588852,
-0.04823499917984009,
-0.022697383537888527,
0.018579915165901184,
-0.07161272317171097,
0.03875236213207245,
-0.12375252693891525,
0.03104625642299652,
0.05026732757687569,
0.2888566553592682,
0.02930922619998455,
-0.34704405069351196,
-0.10642806440591812,
0.0019258428364992142,
-0.04293318837881088,
-0.028925729915499687,
0.017379149794578552,
0.06904561817646027,
-0.0731070414185524,
0.06965002417564392,
-0.07757160812616348,
0.11290275305509567,
-0.049186136573553085,
0.03473770618438721,
0.046740010380744934,
0.11019483208656311,
-0.0018020786810666323,
0.06111247092485428,
-0.27359405159950256,
0.2816948890686035,
0.018430355936288834,
0.07329299300909042,
-0.06869218498468399,
0.015158945694565773,
0.019730733707547188,
0.06048274412751198,
0.057172179222106934,
-0.0118199922144413,
-0.12195328623056412,
-0.14436447620391846,
-0.08483383804559708,
0.010676720179617405,
0.08556374162435532,
0.02167128212749958,
0.10802502930164337,
-0.012086162343621254,
0.0070413327775895596,
0.05125750973820686,
-0.011463682167232037,
-0.07404166460037231,
-0.09401652216911316,
0.020259350538253784,
0.034364379942417145,
-0.0282069593667984,
-0.0723208487033844,
-0.10728118568658829,
-0.07661233097314835,
0.16747957468032837,
0.020201314240694046,
-0.05240076780319214,
-0.11850320547819138,
0.05331253260374069,
0.0917484313249588,
-0.08670243620872498,
0.04163104295730591,
-0.007728259079158306,
0.1045926958322525,
0.00392682570964098,
-0.07407677173614502,
0.11153654009103775,
-0.06341463327407837,
-0.17074570059776306,
-0.048554178327322006,
0.10573145747184753,
0.03121810406446457,
0.06310276687145233,
-0.010305795818567276,
0.03755582496523857,
-0.02694608084857464,
-0.07816371321678162,
0.037241529673337936,
0.004438555333763361,
0.08381066471338272,
-0.03093990683555603,
-0.024668369442224503,
0.026634976267814636,
-0.07089371979236603,
-0.03509386628866196,
0.17069396376609802,
0.2574041187763214,
-0.09379739314317703,
0.0592869408428669,
0.05308440700173378,
-0.06504455208778381,
-0.15715141594409943,
0.016797197982668877,
0.05148731544613838,
-0.00028632686007767916,
0.009664013050496578,
-0.19676260650157928,
0.049630239605903625,
0.10262876749038696,
-0.01653232052922249,
0.06967220455408096,
-0.32932716608047485,
-0.1286747306585312,
0.1097308024764061,
0.1276828944683075,
0.08103685826063156,
-0.16149269044399261,
-0.044660039246082306,
-0.02111033909022808,
-0.15053316950798035,
0.11468245089054108,
-0.11055584251880646,
0.11076481640338898,
-0.034284558147192,
0.09620249271392822,
0.009713291190564632,
-0.05885010212659836,
0.1220373809337616,
-0.006487166043370962,
0.08003488183021545,
-0.06796253472566605,
0.020359424874186516,
0.09768027812242508,
-0.07115127146244049,
0.050066351890563965,
-0.08950535953044891,
0.03480686992406845,
-0.09075626730918884,
-0.020353903993964195,
-0.06978250294923782,
0.018245775252580643,
-0.032971277832984924,
-0.040551599115133286,
-0.05226702243089676,
0.00977471936494112,
0.07031840831041336,
-0.018827788531780243,
0.18510130047798157,
0.018728330731391907,
0.1414845585823059,
0.15459801256656647,
0.09085904061794281,
-0.11586970090866089,
-0.05553093180060387,
0.00038780272006988525,
-0.02614101767539978,
0.04920060560107231,
-0.16496440768241882,
0.036415986716747284,
0.13853836059570312,
0.010327520780265331,
0.12498020380735397,
0.06857692450284958,
-0.06344922631978989,
0.01903623901307583,
0.05278141424059868,
-0.15244004130363464,
-0.12076522409915924,
0.006316369399428368,
0.030881265178322792,
-0.09954269975423813,
0.06346239149570465,
0.11511122435331345,
-0.06224669888615608,
-0.020096970722079277,
0.0018634794978424907,
0.020089231431484222,
-0.024482259526848793,
0.1902618408203125,
0.03638104349374771,
0.06018444895744324,
-0.10031695663928986,
0.09523383527994156,
0.05412525683641434,
-0.10021989792585373,
0.047664206475019455,
0.11224290728569031,
-0.08362217247486115,
-0.030356736853718758,
0.04871903732419014,
0.16085150837898254,
-0.06179280951619148,
-0.06027679517865181,
-0.1635173261165619,
-0.1202859878540039,
0.09521575272083282,
0.17382849752902985,
0.06830400973558426,
0.00031749976915307343,
-0.04067128151655197,
0.0005209363298490644,
-0.12083274871110916,
0.0899040475487709,
0.061933401972055435,
0.07003635913133621,
-0.13102245330810547,
0.13738156855106354,
0.0021693226881325245,
0.03351713344454765,
-0.011856752447783947,
0.012372259981930256,
-0.09716124832630157,
0.006891600787639618,
-0.15222109854221344,
-0.005773062352091074,
-0.055857740342617035,
-0.007392967119812965,
-0.01740972511470318,
-0.043958310037851334,
-0.06443822383880615,
0.02886020950973034,
-0.11068376153707504,
-0.029864486306905746,
0.013199004344642162,
0.03521764278411865,
-0.11805392056703568,
-0.01841876283288002,
0.01114001777023077,
-0.07368572056293488,
0.0702173113822937,
0.04654828459024429,
0.0016955329338088632,
0.028615381568670273,
-0.06181147322058678,
0.004074395634233952,
0.05847329646348953,
0.002615629928186536,
0.06366613507270813,
-0.128355011343956,
-0.017781661823391914,
0.007317828014492989,
0.01890811324119568,
0.016993025317788124,
0.0883963331580162,
-0.11721030622720718,
-0.004166158847510815,
-0.011865253560245037,
-0.0668160542845726,
-0.060899700969457626,
0.05913778394460678,
0.10653122514486313,
0.011182648129761219,
0.17379632592201233,
-0.07603906095027924,
0.03686472401022911,
-0.20456404983997345,
0.007033640518784523,
0.005264453124254942,
-0.12515458464622498,
-0.07217031717300415,
-0.03902193903923035,
0.06482827663421631,
-0.0735962986946106,
0.13191868364810944,
0.0020227187778800726,
0.00602423632517457,
0.04665343090891838,
-0.03899458795785904,
-0.03818387910723686,
0.01506925467401743,
0.1750103384256363,
0.026395395398139954,
-0.037576377391815186,
0.0669606477022171,
0.034220147877931595,
0.08285332471132278,
0.12093249708414078,
0.20336878299713135,
0.149173304438591,
0.03809057176113129,
0.10257925093173981,
0.04435659572482109,
-0.043481890112161636,
-0.1702069640159607,
0.06868649274110794,
-0.0228983536362648,
0.13265353441238403,
-0.009739428758621216,
0.1842077374458313,
0.1265956610441208,
-0.1495484560728073,
0.06407636404037476,
-0.04017239063978195,
-0.08254510909318924,
-0.11877413839101791,
-0.08661863207817078,
-0.08712857216596603,
-0.15633989870548248,
-0.0033661804627627134,
-0.12127666175365448,
0.04810621961951256,
0.04696632921695709,
0.021640479564666748,
-0.012418788857758045,
0.1205761730670929,
0.025168413296341896,
0.010122356936335564,
0.05211441218852997,
-0.001501144259236753,
-0.03988984599709511,
-0.06258708983659744,
-0.07055214792490005,
0.015851818025112152,
-0.0006464962498284876,
0.04454025253653526,
-0.008454132825136185,
-0.02097458951175213,
0.04098966345191002,
-0.0243467316031456,
-0.11214097589254379,
0.01375284418463707,
0.021575815975666046,
0.06804361194372177,
0.050108328461647034,
0.009212738834321499,
-0.0017184690805152059,
-0.010777346789836884,
0.1923620104789734,
-0.06606882065534592,
-0.06515263766050339,
-0.10498738288879395,
0.22723031044006348,
0.033130399882793427,
-0.03516561910510063,
0.03287170082330704,
-0.06746935099363327,
-0.015837032347917557,
0.19627924263477325,
0.19496403634548187,
-0.022204603999853134,
-0.016122721135616302,
-0.004921445623040199,
-0.011302784085273743,
-0.020396340638399124,
0.08498874306678772,
0.11992385238409042,
0.03381255269050598,
-0.0737653523683548,
-0.025312840938568115,
-0.06472054868936539,
-0.011272795498371124,
-0.05752035975456238,
0.0804121196269989,
0.028997287154197693,
-0.005998612847179174,
-0.03090188279747963,
0.051788050681352615,
-0.049443718045949936,
-0.06320230662822723,
0.029198555275797844,
-0.2098490595817566,
-0.158389613032341,
-0.004379444755613804,
0.07709762454032898,
-0.0010544187389314175,
0.06214329972863197,
-0.005915307905524969,
-0.004808052442967892,
0.09844919294118881,
-0.010199028067290783,
-0.0904044508934021,
-0.08617492765188217,
0.09488542377948761,
-0.15319108963012695,
0.19767872989177704,
-0.03394902125000954,
0.033781442791223526,
0.13655740022659302,
0.04941030591726303,
-0.10305862873792648,
0.05866600200533867,
0.04045279324054718,
-0.04872649535536766,
0.007831888273358345,
0.11845891922712326,
-0.027918456122279167,
0.0678979679942131,
0.04496896639466286,
-0.12378691136837006,
-0.009015928953886032,
-0.07238824665546417,
-0.04734392464160919,
-0.018591519445180893,
-0.03654739633202553,
-0.048490893095731735,
0.11829482764005661,
0.200966015458107,
-0.03658676892518997,
-0.0045423139818012714,
-0.06690403074026108,
0.027586892247200012,
0.06808434426784515,
-0.0024679629132151604,
-0.05171113833785057,
-0.24604491889476776,
0.013838988728821278,
0.06982554495334625,
-0.0016317990375682712,
-0.2464699000120163,
-0.09776165336370468,
0.007817600853741169,
-0.05810769647359848,
-0.09694226831197739,
0.08111278712749481,
0.09583716094493866,
0.05261152982711792,
-0.06372624635696411,
-0.021249106153845787,
-0.06678833067417145,
0.16216787695884705,
-0.14591540396213531,
-0.06335698068141937
] |
null | null |
transformers
|
# aggb DialogGPT Spanish model
|
{"tags": ["conversational"]}
|
text-generation
|
aggb/DialogGPT-small-AGGB-B
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# aggb DialogGPT spanish model
|
[
"# aggb DialogGPT spanish model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# aggb DialogGPT spanish model"
] |
[
51,
9
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# aggb DialogGPT spanish model"
] |
[
-0.06067728251218796,
0.06893843412399292,
-0.00569493742659688,
0.04204516112804413,
0.1417158842086792,
-0.0009830440394580364,
0.1116136983036995,
0.10950817167758942,
0.01702086068689823,
-0.023230433464050293,
0.10894568264484406,
0.17519201338291168,
0.000951242633163929,
0.09999680519104004,
-0.07243235409259796,
-0.26513829827308655,
0.05839693173766136,
0.008094584569334984,
-0.0019018654711544514,
0.09289867430925369,
0.09591364115476608,
-0.023134347051382065,
0.07778409868478775,
0.014134084805846214,
-0.14158961176872253,
0.024790333583950996,
0.01845017820596695,
-0.1544007807970047,
0.13127416372299194,
0.11225569248199463,
-0.005158933810889721,
0.028445303440093994,
-0.08383574336767197,
-0.10366638004779816,
0.02564476802945137,
-0.04192867875099182,
-0.06963076442480087,
0.05330765247344971,
0.043652988970279694,
-0.09084165096282959,
0.12744706869125366,
0.0892995223402977,
-0.0129100251942873,
0.06845779716968536,
-0.2138075977563858,
-0.06978379935026169,
0.00949908047914505,
0.014974190853536129,
0.062407128512859344,
0.07910650223493576,
-0.048061493784189224,
0.017146125435829163,
-0.07305294275283813,
0.049757666885852814,
0.07699446380138397,
-0.3254273533821106,
-0.027273811399936676,
0.151959627866745,
0.037812698632478714,
0.0768548995256424,
-0.01986447162926197,
0.118411585688591,
0.021374370902776718,
-0.016391603276133537,
-0.046349771320819855,
-0.09240788221359253,
-0.10916616022586823,
0.03147682175040245,
-0.07709088176488876,
-0.05463293939828873,
0.23821255564689636,
-0.0398065559566021,
0.05977470427751541,
-0.10208559781312943,
-0.08728484064340591,
0.0052878363057971,
-0.053530819714069366,
-0.04255422577261925,
-0.06551237404346466,
0.0963253527879715,
0.019283927977085114,
-0.09928878396749496,
-0.11958245933055878,
0.0001751142553985119,
-0.18659086525440216,
0.17729315161705017,
0.015104807913303375,
0.012169409543275833,
-0.15190698206424713,
0.12113001942634583,
-0.04660818353295326,
-0.09910410642623901,
0.000014773570001125336,
-0.07737038284540176,
0.007337626535445452,
0.033535320311784744,
-0.048532068729400635,
-0.027768300846219063,
0.08970926702022552,
0.10453999042510986,
-0.026075107976794243,
0.004660230129957199,
-0.0005310272099450231,
0.04516734182834625,
0.06226009503006935,
0.1202317625284195,
-0.011554969474673271,
-0.06774549931287766,
0.020196357741951942,
-0.14069880545139313,
-0.013563854619860649,
-0.05307846888899803,
-0.2165955752134323,
-0.06751875579357147,
0.03627358004450798,
0.08033694326877594,
0.035587843507528305,
0.1013251543045044,
-0.05455147475004196,
-0.05875859037041664,
0.04994481801986694,
-0.012248107232153416,
-0.012584814801812172,
-0.011869308538734913,
-0.004014507867395878,
0.10564344376325607,
-0.010332946665585041,
0.03771539404988289,
-0.09525255858898163,
-0.052299864590168,
-0.06773240864276886,
-0.043871551752090454,
-0.028806505724787712,
-0.012148389592766762,
0.0009751562029123306,
-0.027265425771474838,
0.0070106470957398415,
-0.1515663117170334,
-0.1584579348564148,
-0.015585641376674175,
0.010215613059699535,
-0.10937964171171188,
-0.08114156126976013,
-0.11048272252082825,
0.03793454170227051,
0.04401461407542229,
-0.06581497937440872,
-0.005362090189009905,
-0.05074802041053772,
0.09952719509601593,
0.025402501225471497,
0.09774657338857651,
-0.1109805703163147,
0.07884441316127777,
-0.0974830836057663,
-0.01667933166027069,
-0.12208378314971924,
0.1753373146057129,
-0.004512576386332512,
0.04945118725299835,
-0.06274256855249405,
0.009923478588461876,
-0.07758206874132156,
0.09246425330638885,
-0.04242541640996933,
0.24740594625473022,
-0.0409044474363327,
-0.116375632584095,
0.2847351133823395,
-0.01884574256837368,
-0.12599517405033112,
0.10896655917167664,
0.010909294709563255,
0.14900261163711548,
0.14221137762069702,
0.21111243963241577,
-0.05660400539636612,
-0.010792914777994156,
0.10977410525083542,
0.09773781895637512,
-0.06502611935138702,
-0.01825574040412903,
0.06401246786117554,
-0.06214094161987305,
-0.07451434433460236,
0.060181815177202225,
0.029626643285155296,
0.07022684067487717,
-0.025904927402734756,
-0.034430988132953644,
0.023997202515602112,
0.033576011657714844,
0.08832071721553802,
-0.02589740976691246,
0.10978053510189056,
0.00047927803825587034,
-0.06041340529918671,
-0.012723980471491814,
-0.014112813398241997,
-0.02265806496143341,
0.003192791249603033,
-0.07441612333059311,
0.1182616651058197,
-0.08965440094470978,
0.07863613218069077,
-0.11561384052038193,
-0.08388175815343857,
-0.04217361658811569,
0.18102630972862244,
0.05267498642206192,
0.13252609968185425,
0.04982946068048477,
-0.059008724987506866,
-0.0257892906665802,
0.03830227255821228,
0.13261280953884125,
-0.036602240055799484,
-0.0543208047747612,
-0.09707411378622055,
0.09668333828449249,
-0.04599738121032715,
0.13401995599269867,
-0.05816541984677315,
0.003868616186082363,
0.0533737912774086,
0.04765777662396431,
-0.02428579144179821,
0.025342151522636414,
0.003940125927329063,
-0.010246235877275467,
-0.05104631930589676,
-0.007058951538056135,
0.11251868307590485,
0.009879887104034424,
-0.07553446292877197,
0.2340041697025299,
-0.15877720713615417,
0.1636580526828766,
0.1567421704530716,
-0.1753719002008438,
-0.01561553031206131,
-0.17467498779296875,
-0.025758106261491776,
0.02945706434547901,
0.050717584788799286,
-0.033138155937194824,
0.24777677655220032,
-0.021465031430125237,
0.1869477927684784,
-0.08263381570577621,
-0.032229240983724594,
-0.012567083351314068,
-0.07714710384607315,
-0.0009503546170890331,
0.08815011382102966,
0.0783226415514946,
-0.15451020002365112,
0.19782686233520508,
0.056023456156253815,
0.08588750660419464,
0.22011244297027588,
0.05251466482877731,
0.006362462416291237,
0.03842369467020035,
0.034643132239580154,
-0.023776471614837646,
-0.0391140915453434,
-0.2754574716091156,
-0.008725077845156193,
0.07571444660425186,
0.04399111121892929,
0.13519908487796783,
-0.09502268582582474,
-0.04789372533559799,
0.004862048663198948,
-0.04048728197813034,
0.07986587285995483,
0.13369327783584595,
-0.0103360116481781,
0.11705970764160156,
0.043404169380664825,
-0.03187282383441925,
0.08625069260597229,
0.0401359498500824,
-0.08112341910600662,
0.20064111053943634,
-0.10080672800540924,
-0.40052106976509094,
-0.13193878531455994,
-0.20289501547813416,
-0.06322557479143143,
0.10606003552675247,
0.11079840362071991,
-0.13758042454719543,
-0.005483174696564674,
0.060114987194538116,
0.12028739601373672,
-0.057796888053417206,
-0.024377834051847458,
-0.032794442027807236,
0.0036274055019021034,
-0.12223949283361435,
-0.08923203498125076,
-0.06182006001472473,
-0.014637012034654617,
-0.08621927350759506,
0.09523797035217285,
-0.15272000432014465,
0.06736580282449722,
0.22877517342567444,
0.07695819437503815,
0.061408765614032745,
-0.034848857671022415,
0.22694429755210876,
-0.1381811499595642,
-0.005199402570724487,
0.19516795873641968,
0.007356311194598675,
0.036960966885089874,
0.14169812202453613,
-0.015095967799425125,
-0.10460050404071808,
-0.002987675368785858,
-0.04513360559940338,
-0.09066328406333923,
-0.2304529845714569,
-0.15197935700416565,
-0.12660875916481018,
0.03741080313920975,
0.022199250757694244,
0.05143919214606285,
0.2328723967075348,
0.12163211405277252,
-0.03025107830762863,
0.017811261117458344,
0.05994422733783722,
0.10002350807189941,
0.31878918409347534,
-0.07167834043502808,
0.1177637130022049,
-0.022104233503341675,
-0.16514235734939575,
0.1043388694524765,
0.11077909171581268,
0.07297200709581375,
0.06299979984760284,
0.07957889139652252,
0.0313350185751915,
-0.014656733721494675,
0.08733964711427689,
0.011153506115078926,
0.008209003135561943,
-0.05993979796767235,
-0.07696419954299927,
-0.04840844124555588,
-0.045955657958984375,
0.036684952676296234,
0.03353576362133026,
-0.1671994924545288,
-0.045920394361019135,
-0.0927063524723053,
0.08363659679889679,
0.00547376275062561,
0.05464500933885574,
-0.18341492116451263,
-0.035849377512931824,
0.06968269497156143,
-0.011638985946774483,
-0.13689561188220978,
0.035523559898138046,
-0.003085422795265913,
-0.17340129613876343,
0.04546663910150528,
-0.02680329978466034,
0.10749143362045288,
-0.06862534582614899,
0.08282443881034851,
-0.12854407727718353,
-0.036738231778144836,
0.018204137682914734,
0.12716469168663025,
-0.30232304334640503,
0.22554826736450195,
0.006871817167848349,
-0.07473623752593994,
-0.10856149345636368,
0.007874694652855396,
-0.030147815123200417,
0.0787496417760849,
0.1236310750246048,
-0.01076577976346016,
0.09710283577442169,
0.02198220230638981,
-0.015226438641548157,
0.03930860757827759,
0.03527897223830223,
-0.07176057249307632,
-0.030669810250401497,
-0.03899543359875679,
0.011485142633318901,
-0.027524394914507866,
-0.026646040380001068,
0.01824869215488434,
-0.23516252636909485,
0.0869421735405922,
0.03695577755570412,
0.15725234150886536,
0.038981422781944275,
-0.010558617301285267,
-0.1088905781507492,
0.32351794838905334,
0.016564134508371353,
-0.10908971726894379,
-0.12064537405967712,
0.005863752216100693,
0.0429115891456604,
-0.05757923424243927,
0.05273802578449249,
-0.0880008339881897,
0.017939068377017975,
-0.0637546256184578,
-0.14820389449596405,
0.165313258767128,
-0.10986296832561493,
-0.014049861580133438,
-0.032565899193286896,
0.17962893843650818,
-0.029432060196995735,
0.008784536272287369,
0.06505241245031357,
-0.002807420678436756,
-0.09796548634767532,
-0.09211606532335281,
-0.010049151256680489,
0.02149728685617447,
-0.012497680261731148,
0.026717575266957283,
-0.030287910252809525,
-0.10725647956132889,
-0.014833463355898857,
-0.058385685086250305,
0.29655468463897705,
0.1352936327457428,
-0.03936655819416046,
0.2012949287891388,
0.14403466880321503,
-0.06609942018985748,
-0.23208019137382507,
-0.09961526095867157,
-0.048516180366277695,
-0.02527925744652748,
-0.0710536539554596,
-0.21417130529880524,
0.024152692407369614,
0.02989538013935089,
-0.03719031810760498,
0.08817235380411148,
-0.355338454246521,
-0.08569838106632233,
0.10930509865283966,
-0.023654650896787643,
0.4539892077445984,
-0.09756575524806976,
-0.11408815532922745,
-0.06257171928882599,
-0.17066195607185364,
0.12120668590068817,
0.06995691359043121,
0.11763500422239304,
-0.011217819526791573,
0.14658494293689728,
0.057320401072502136,
0.0072843655943870544,
0.1500457525253296,
0.016075359657406807,
-0.07155671715736389,
-0.08802370727062225,
-0.04928392171859741,
0.03477887436747551,
0.03379245847463608,
0.02395194210112095,
-0.04225626587867737,
0.008973951451480389,
-0.1345977634191513,
-0.09856291115283966,
-0.07150092720985413,
0.028339045122265816,
0.027078647166490555,
-0.07830434292554855,
0.01419147476553917,
-0.04634426161646843,
-0.015905704349279404,
0.014465941116213799,
0.11912316828966141,
-0.12341219186782837,
0.15468928217887878,
0.06411443650722504,
0.11507207155227661,
-0.10160146653652191,
-0.04373052343726158,
-0.08055433630943298,
-0.03776463493704796,
0.07550027966499329,
-0.08891668170690536,
0.01331073883920908,
0.09739114344120026,
-0.060250576585531235,
0.09142076224088669,
0.06863376498222351,
-0.016562994569540024,
0.03235291689634323,
0.1285613626241684,
-0.19337113201618195,
-0.0722050592303276,
-0.08426833152770996,
0.028477758169174194,
0.09943374991416931,
0.07975482195615768,
0.19215968251228333,
0.04536160081624985,
-0.059139035642147064,
0.015741536393761635,
0.003368254518136382,
-0.03185289725661278,
0.065956711769104,
-0.01739758439362049,
0.006125294137746096,
-0.15103155374526978,
0.02208070270717144,
0.03168569877743721,
-0.1039971262216568,
0.04006873443722725,
0.12392114847898483,
-0.09265255928039551,
-0.1206343024969101,
-0.06255112588405609,
0.08497168123722076,
-0.23949873447418213,
-0.014709671959280968,
-0.03482796996831894,
-0.11990422755479813,
0.06673550605773926,
0.06437835097312927,
0.04456675797700882,
0.05961854010820389,
-0.06685337424278259,
-0.02187858149409294,
0.040195032954216,
-0.026735270395874977,
0.01683356985449791,
-0.013379044830799103,
-0.04482510685920715,
0.006032882258296013,
-0.027864225208759308,
0.07141721248626709,
-0.09544705599546432,
-0.11363118886947632,
-0.18122278153896332,
0.03036956861615181,
-0.138518288731575,
-0.07330118119716644,
-0.10681041330099106,
-0.05226831138134003,
-0.031427595764398575,
-0.04344119876623154,
-0.011524373665452003,
-0.05227168649435043,
-0.1145378053188324,
0.051210690289735794,
-0.04956172779202461,
0.03615986555814743,
-0.05519307404756546,
0.02824845165014267,
0.04168590158224106,
-0.011988118290901184,
0.1618538200855255,
0.09525106847286224,
-0.09081947803497314,
0.07251320779323578,
-0.22159382700920105,
-0.02281600795686245,
0.12552231550216675,
0.0024862943682819605,
0.061070747673511505,
0.047827430069446564,
0.04648585245013237,
0.07267270237207413,
0.04476243257522583,
0.06850659847259521,
0.03657657280564308,
-0.1320439577102661,
0.06471502780914307,
-0.04934731498360634,
-0.13812361657619476,
-0.026791205629706383,
0.01646755449473858,
0.03140658140182495,
0.012698601931333542,
0.05249207466840744,
-0.0850282534956932,
0.0625251978635788,
-0.05055764690041542,
0.027115728706121445,
-0.0071122907102108,
-0.12128383666276932,
-0.050378598272800446,
-0.09694336354732513,
0.018297553062438965,
0.04595715552568436,
0.23654940724372864,
0.08112745732069016,
0.03188236430287361,
0.005193243734538555,
0.07051444053649902,
0.11435262858867645,
-0.014561767689883709,
0.1869465708732605,
0.12747104465961456,
-0.0046874950639903545,
-0.08428400754928589,
0.13795879483222961,
0.026283934712409973,
-0.0073272185400128365,
0.08330688625574112,
-0.04614920914173126,
-0.0345161110162735,
0.0860869288444519,
-0.012837204150855541,
0.002518290653824806,
-0.08694775402545929,
-0.08414361625909805,
-0.048289716243743896,
0.03260527551174164,
-0.0656052902340889,
0.07945728302001953,
0.15488258004188538,
-0.04482322186231613,
0.049691714346408844,
0.033359237015247345,
-0.031168699264526367,
-0.1601424366235733,
-0.20251081883907318,
-0.06238380819559097,
-0.1703559160232544,
0.003447939408943057,
-0.11893477290868759,
0.05170166492462158,
0.016772037371993065,
0.07347045093774796,
-0.08028526604175568,
0.08184395730495453,
0.05039016157388687,
-0.1659088134765625,
0.04550383239984512,
-0.01974577270448208,
0.14440831542015076,
-0.06577908992767334,
0.03582857549190521,
-0.0949230045080185,
0.10468652844429016,
0.016550669446587563,
0.07326408475637436,
-0.012854993343353271,
-0.017335202544927597,
-0.10702986270189285,
-0.0440070815384388,
-0.07116791605949402,
0.09237249940633774,
-0.002882573287934065,
0.1554674655199051,
-0.009772347286343575,
-0.049380794167518616,
0.0346917062997818,
0.2636575698852539,
-0.0439457930624485,
-0.0928041860461235,
-0.0687619149684906,
0.2182471752166748,
-0.005865894258022308,
0.13613274693489075,
-0.04439365863800049,
-0.004054400138556957,
-0.07331520318984985,
0.32753169536590576,
0.2809849679470062,
-0.09678438305854797,
-0.0065175993368029594,
0.01952429860830307,
0.042809173464775085,
0.15066872537136078,
0.11176338791847229,
0.05605354160070419,
0.3836125135421753,
-0.06940445303916931,
-0.03275982290506363,
-0.0065399352461099625,
-0.006421531550586224,
-0.07507025450468063,
0.0817653015255928,
0.023224109783768654,
-0.07168684899806976,
0.0009496244601905346,
0.1260613650083542,
-0.23446644842624664,
0.06326205283403397,
-0.14287599921226501,
-0.17539721727371216,
-0.104485884308815,
-0.030595330521464348,
0.06745156645774841,
0.05450209975242615,
0.10084255784749985,
0.015467597171664238,
-0.09158825874328613,
0.05502936616539955,
0.044560134410858154,
-0.19644513726234436,
-0.016432998701930046,
0.09129070490598679,
-0.018170751631259918,
-0.021914783865213394,
-0.04713951051235199,
0.06296433508396149,
0.09084250032901764,
0.039600979536771774,
-0.027670003473758698,
0.017916318029165268,
0.005738585256040096,
-0.024037839844822884,
0.08394187688827515,
-0.048245370388031006,
0.03732571378350258,
-0.10924898833036423,
0.0827309787273407,
-0.1424626260995865,
0.05776709318161011,
0.03778844326734543,
-0.0017702733166515827,
-0.027546510100364685,
0.06424251943826675,
-0.08321640640497208,
0.05328003317117691,
0.06979529559612274,
-0.009413596242666245,
-0.016074281185865402,
-0.04738710820674896,
0.03010874055325985,
0.004019599407911301,
-0.021340344101190567,
-0.103310227394104,
-0.16568942368030548,
-0.09847268462181091,
0.063545823097229,
-0.020835204049944878,
-0.19770780205726624,
0.026709116995334625,
-0.11940228939056396,
0.08087227493524551,
-0.12496048212051392,
0.09395519644021988,
0.08135351538658142,
0.032242536544799805,
0.018644556403160095,
0.0051509300246834755,
0.018327096477150917,
0.06826384365558624,
-0.09634597599506378,
-0.03884032368659973
] |
null | null |
transformers
|
A bert-base-uncased model trained on the Tobacco800 dataset for the task of page-stream segmentation.
[Link](https://github.com/agiagoulas/page-stream-segmentation) to the GitHub Repo with the model implementation.
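Page-stream segmentation is usually framed as per-page classification: given the text of one page, predict whether it starts a new document in the stream. The snippet below is a minimal, hypothetical sketch of that usage, assuming the checkpoint exposes a standard sequence-classification head; the exact input format and label semantics are defined in the linked repository, not here.

```python
# Hedged sketch: treat each page's OCR text as one classification input.
# The label meaning (e.g. 1 = "first page of a new document") is an assumption;
# consult the linked GitHub repository for the trained label mapping.
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("agiagoulas/bert-pss")
model = AutoModelForSequenceClassification.from_pretrained("agiagoulas/bert-pss")

page_text = "OCR text of the current page ..."
inputs = tokenizer(page_text, truncation=True, max_length=512, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(int(torch.argmax(logits, dim=-1)))  # predicted class id for this page
```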
|
{}
|
text-classification
|
agiagoulas/bert-pss
|
[
"transformers",
"pytorch",
"jax",
"bert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
bert-base-uncased model trained on the tobacco800 dataset for the task of page-stream-segmentation.
Link to the GitHub Repo with the model implementation.
|
[] |
[
"TAGS\n#transformers #pytorch #jax #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
39
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
-0.01809483766555786,
0.06673542410135269,
-0.007573992013931274,
0.02542104199528694,
0.19456367194652557,
0.04259642958641052,
0.07180475443601608,
0.11746370047330856,
0.065577931702137,
-0.0401817262172699,
0.11666674166917801,
0.23135067522525787,
-0.03061060607433319,
0.11772982031106949,
-0.10850205272436142,
-0.29288947582244873,
0.06133841350674629,
0.06536684185266495,
0.00420401943847537,
0.11130789667367935,
0.08473420888185501,
-0.09304895997047424,
0.0721178650856018,
-0.03993720933794975,
-0.13622891902923584,
0.041352782398462296,
0.05172041431069374,
-0.13071946799755096,
0.09408676624298096,
0.04389055818319321,
0.15670926868915558,
0.026289479807019234,
-0.05535130202770233,
-0.1438279002904892,
0.03692479059100151,
0.0005937846144661307,
-0.08006128668785095,
0.05240355059504509,
0.08632553368806839,
-0.10694190114736557,
0.013523316942155361,
0.043739791959524155,
0.027750981971621513,
0.04982184246182442,
-0.14666184782981873,
-0.09967022389173508,
-0.0019326872425153852,
0.029109641909599304,
0.06774714589118958,
0.05822955071926117,
0.0010195786599069834,
0.14306534826755524,
-0.1304115504026413,
0.12180308252573013,
0.11148694157600403,
-0.29323816299438477,
-0.01943984627723694,
0.09444136917591095,
0.04154133424162865,
0.05044957250356674,
-0.056781791150569916,
0.043631937354803085,
0.02468041144311428,
0.000946011976338923,
0.014533239416778088,
-0.07892842590808868,
-0.1071673110127449,
0.032310470938682556,
-0.07385499775409698,
-0.04111378639936447,
0.21185067296028137,
-0.049293242394924164,
0.06449735909700394,
-0.02123348042368889,
-0.08135563135147095,
-0.056714851409196854,
-0.03172755241394043,
0.008735861629247665,
-0.03984520584344864,
0.06747489422559738,
0.02921498380601406,
0.013714022003114223,
-0.10257585346698761,
0.021764980629086494,
-0.19535581767559052,
0.1972673535346985,
0.012189923785626888,
0.05059242621064186,
-0.1859312504529953,
0.04326876252889633,
0.02285129576921463,
-0.10319478064775467,
0.05386510491371155,
-0.09923137724399567,
0.028100797906517982,
-0.04234573617577553,
-0.06257519870996475,
-0.04020916298031807,
0.08136691898107529,
0.12868155539035797,
0.04702077805995941,
0.06588312983512878,
-0.029047340154647827,
0.08873984217643738,
0.03929751738905907,
0.12775880098342896,
0.04212596267461777,
-0.0374809093773365,
0.05176495015621185,
-0.11413440853357315,
-0.014094071462750435,
-0.07464064657688141,
-0.15375252068042755,
-0.016235198825597763,
0.08695993572473526,
0.07627255469560623,
0.011462262831628323,
0.08710302412509918,
-0.06540420651435852,
-0.03694767504930496,
0.06530382484197617,
-0.07531051337718964,
0.027230482548475266,
0.023209715262055397,
0.02529485896229744,
0.09126318246126175,
-0.0292835533618927,
0.0028054488357156515,
-0.06840378791093826,
0.13082213699817657,
-0.06151483207941055,
0.005951870232820511,
-0.04267766699194908,
-0.07654917240142822,
0.03411971405148506,
-0.11691025644540787,
0.03771146014332771,
-0.16825729608535767,
-0.09779638797044754,
0.0062947659753263,
0.03158242627978325,
-0.0005342107615433633,
-0.03975434601306915,
-0.030474286526441574,
0.004082287661731243,
0.05146385729312897,
-0.056856293231248856,
-0.06116197258234024,
-0.07278820127248764,
0.09648430347442627,
-0.033643417060375214,
0.07996653765439987,
-0.10526403039693832,
0.07087257504463196,
-0.09409809857606888,
-0.02970806322991848,
-0.1284375637769699,
0.022813236340880394,
-0.046654168516397476,
0.18474748730659485,
0.009493359364569187,
-0.037707120180130005,
-0.04483810439705849,
0.05696108564734459,
-0.07555562257766724,
0.1717727780342102,
-0.0793110728263855,
-0.11701413244009018,
0.20788724720478058,
-0.07954380661249161,
-0.13876761496067047,
0.08493359386920929,
-0.01615220494568348,
0.017958471551537514,
0.10674294829368591,
0.19496823847293854,
0.08540180325508118,
-0.003906393423676491,
0.08904492110013962,
0.11408630758523941,
-0.07959926128387451,
-0.1108965128660202,
0.005469737574458122,
0.007978282868862152,
-0.15063118934631348,
0.05894370377063751,
0.07545538246631622,
0.07129368931055069,
-0.0513894185423851,
-0.04089389741420746,
-0.003922690637409687,
-0.005997346714138985,
0.11302582919597626,
0.06324242800474167,
0.12578195333480835,
-0.08385848253965378,
-0.004017953295260668,
-0.0010093949967995286,
-0.010608415119349957,
0.025180591270327568,
0.0166823398321867,
-0.06790076941251755,
0.10298717767000198,
0.004510062281042337,
0.032306913286447525,
-0.2183763086795807,
-0.08230480551719666,
-0.0019306221511214972,
0.12755239009857178,
-0.019294047728180885,
0.11435849964618683,
0.053465988487005234,
-0.06457269191741943,
-0.013794570229947567,
-0.023813555017113686,
0.17901155352592468,
0.022746099159121513,
-0.06343037635087967,
-0.08461648225784302,
0.059753432869911194,
-0.07299339026212692,
-0.012081671506166458,
-0.08169719576835632,
0.012660477310419083,
0.06630835682153702,
0.10648472607135773,
0.019467314705252647,
0.05848253145813942,
-0.02924266830086708,
0.05626295506954193,
-0.06475984305143356,
0.027140533551573753,
0.1179659441113472,
-0.006632041186094284,
-0.056153301149606705,
0.15833188593387604,
-0.13553740084171295,
0.32089653611183167,
0.19996315240859985,
-0.29291605949401855,
-0.003792791860178113,
-0.017230169847607613,
0.002490597777068615,
0.025653522461652756,
0.034587517380714417,
0.009043003432452679,
0.09345389157533646,
-0.008162388578057289,
0.20210638642311096,
-0.030431343242526054,
-0.04563094303011894,
-0.006599363870918751,
-0.045313142240047455,
-0.035156700760126114,
0.0899621918797493,
0.0700104758143425,
-0.21636348962783813,
0.19578737020492554,
0.23056425154209137,
0.014083887450397015,
0.16511985659599304,
-0.0034332124050706625,
0.03841010108590126,
0.07709190249443054,
-0.054916925728321075,
-0.024765321984887123,
-0.06462280452251434,
-0.1797722727060318,
-0.0519074946641922,
0.07089265435934067,
0.026776624843478203,
0.05498277395963669,
-0.10467023402452469,
-0.041482336819171906,
-0.001121786772273481,
0.03263681009411812,
-0.03239976987242699,
0.07532784342765808,
0.06611774861812592,
0.10848522931337357,
0.007556584198027849,
-0.07854785025119781,
0.10835256427526474,
-0.004704562947154045,
-0.08111206442117691,
0.18040013313293457,
-0.14344702661037445,
-0.34762370586395264,
-0.13627612590789795,
-0.20350822806358337,
-0.009711001999676228,
0.04890860617160797,
0.09789098799228668,
-0.11364968121051788,
-0.041684478521347046,
0.04273238033056259,
-0.01656302809715271,
-0.07931388914585114,
0.04226658120751381,
-0.06387604027986526,
0.07653088867664337,
-0.05215940251946449,
-0.05663241818547249,
-0.07558713108301163,
-0.03065023384988308,
-0.011166092939674854,
0.1459382027387619,
-0.12642274796962738,
0.06581402570009232,
0.16313746571540833,
-0.005721473600715399,
0.0682130977511406,
-0.034285202622413635,
0.17564783990383148,
-0.08818710595369339,
-0.03183111548423767,
0.16446498036384583,
-0.0764211043715477,
0.06765677034854889,
0.15842288732528687,
0.03146690875291824,
-0.06440786272287369,
0.02619570679962635,
-0.04366680607199669,
-0.08154077082872391,
-0.2115747332572937,
-0.12536035478115082,
-0.11997322738170624,
0.06674559414386749,
0.0812050923705101,
0.06874731183052063,
0.1319999098777771,
0.054959509521722794,
0.02062775380909443,
0.013091953471302986,
0.011458709836006165,
0.08337462693452835,
0.21880970895290375,
-0.0013363112229853868,
0.14448416233062744,
-0.048022862523794174,
-0.13040171563625336,
0.0763016939163208,
0.00851297378540039,
0.08838732540607452,
0.1124393567442894,
0.02980091981589794,
-0.003721019485965371,
0.08009619265794754,
0.1710960417985916,
0.12011133134365082,
0.02604288049042225,
-0.025675294920802116,
-0.02881801873445511,
-0.00018281576922163367,
-0.07662030309438705,
0.018537059426307678,
0.07926761358976364,
-0.12852145731449127,
-0.07732073217630386,
-0.1605582982301712,
0.07910539954900742,
0.0842241495847702,
0.04793410003185272,
-0.21289688348770142,
0.011515513993799686,
0.09760864078998566,
-0.03371579200029373,
-0.09820050001144409,
0.07870060950517654,
-0.05545899644494057,
-0.14528445899486542,
0.08979272842407227,
-0.0350475050508976,
0.14132331311702728,
-0.0825844332575798,
0.08438211679458618,
-0.038387857377529144,
-0.11987438797950745,
0.03140055760741234,
0.10854005068540573,
-0.28070196509361267,
0.2154713273048401,
0.012321638874709606,
-0.06612604111433029,
-0.07430197298526764,
-0.024338167160749435,
0.04101184755563736,
0.20623712241649628,
0.07433765381574631,
0.0005549621419049799,
-0.08459877222776413,
-0.17614704370498657,
-0.014856216497719288,
0.01006323006004095,
0.11086399853229523,
-0.04531329125165939,
-0.0162801556289196,
-0.04873865842819214,
-0.032460931688547134,
-0.027329416945576668,
-0.036445118486881256,
0.02981879748404026,
-0.16195650398731232,
0.057034727185964584,
0.02994452603161335,
0.0795973539352417,
0.01832551695406437,
-0.054240815341472626,
-0.10906636714935303,
0.20394474267959595,
-0.07091160863637924,
-0.06390447169542313,
-0.11201474070549011,
-0.07614374160766602,
0.015175776556134224,
-0.08341611921787262,
0.04845348000526428,
-0.0809352919459343,
0.02468218095600605,
-0.053535107523202896,
-0.21068227291107178,
0.14016199111938477,
-0.10740610957145691,
-0.024658169597387314,
-0.07782392203807831,
0.13864511251449585,
-0.0789090171456337,
0.024532439187169075,
0.030736668035387993,
0.0298960842192173,
-0.09730913490056992,
-0.06820393353700638,
0.003594380570575595,
0.012733125127851963,
0.050164107233285904,
0.035532619804143906,
-0.09746024757623672,
-0.06438707560300827,
-0.030386028811335564,
0.013417999260127544,
0.2935657799243927,
0.16158808767795563,
-0.06732630729675293,
0.15693336725234985,
0.13342545926570892,
-0.07602952420711517,
-0.3328520953655243,
-0.0860983356833458,
-0.09572628885507584,
-0.04271673038601875,
-0.039317432790994644,
-0.16259606182575226,
0.1202382817864418,
-0.010392402298748493,
-0.02311943843960762,
0.08732824772596359,
-0.14364519715309143,
-0.08308403193950653,
0.18343095481395721,
-0.016732893884181976,
0.4004720151424408,
-0.11624830961227417,
-0.09180434048175812,
-0.05751292034983635,
-0.12759536504745483,
0.14510828256607056,
0.017240293323993683,
0.07660862803459167,
-0.009753178805112839,
0.03606290742754936,
0.04270001873373985,
-0.038564011454582214,
0.09122075885534286,
-0.01076544914394617,
0.013019079342484474,
-0.11043259501457214,
-0.11935783922672272,
0.015380576252937317,
-0.021142421290278435,
-0.02111625112593174,
-0.015980258584022522,
0.002749914303421974,
-0.17170843482017517,
-0.036437150090932846,
-0.0778099000453949,
0.05227721482515335,
0.03218749910593033,
-0.03192440792918205,
0.018695617094635963,
-0.01605111174285412,
-0.003546775784343481,
0.0006239944486878812,
0.2988184094429016,
-0.045211080461740494,
0.19050680100917816,
0.09056143462657928,
0.13118194043636322,
-0.15948843955993652,
0.012801270000636578,
-0.065861776471138,
-0.06396611779928207,
0.07773881405591965,
-0.08855278044939041,
0.07514625042676926,
0.12905162572860718,
-0.05594771355390549,
0.07226260006427765,
0.11423998326063156,
0.051702432334423065,
-0.03501610830426216,
0.159623384475708,
-0.2296721488237381,
0.03815079107880592,
-0.05544954165816307,
-0.0018353760242462158,
0.06467600911855698,
0.04922626167535782,
0.1242470070719719,
0.03997598588466644,
-0.05610830336809158,
0.005762273445725441,
-0.004814090207219124,
-0.003274239832535386,
0.05463983863592148,
0.058361686766147614,
0.04381057992577553,
-0.134842187166214,
0.04394451528787613,
0.05474484711885452,
-0.17748622596263885,
-0.01261796522885561,
0.14715729653835297,
-0.15453575551509857,
-0.12290617823600769,
0.006111426278948784,
0.15460239350795746,
-0.08248449862003326,
-0.04330691695213318,
-0.07279440015554428,
-0.12399396300315857,
0.0693398267030716,
0.19586825370788574,
0.12060099095106125,
0.0778300017118454,
-0.04378306493163109,
-0.042797669768333435,
0.00712791969999671,
0.007344595156610012,
-0.007872226648032665,
0.023717369884252548,
-0.11883671581745148,
0.01251294557005167,
-0.011050865054130554,
0.15153150260448456,
-0.09586972743272781,
-0.07557052373886108,
-0.19212299585342407,
0.041787270456552505,
-0.06242881715297699,
-0.03224949538707733,
-0.07766050845384598,
-0.022732099518179893,
0.004333932884037495,
-0.05241712927818298,
-0.04193229600787163,
-0.06532690674066544,
-0.1307416558265686,
0.032795269042253494,
-0.017392637208104134,
0.04657064378261566,
-0.05510725453495979,
-0.048384327441453934,
0.10124502331018448,
-0.03388887643814087,
0.08910121768712997,
0.10563872754573822,
-0.07282912731170654,
0.11062116175889969,
-0.1283218264579773,
-0.11185171455144882,
0.12193284928798676,
0.024926789104938507,
0.07325582951307297,
0.059187017381191254,
0.03799671307206154,
0.06631976366043091,
0.01165806408971548,
0.06664728373289108,
0.05780986323952675,
-0.12386669963598251,
0.04742882400751114,
-0.019212204962968826,
-0.1837417036294937,
-0.04135940968990326,
-0.042410314083099365,
0.09787000715732574,
-0.003808269975706935,
0.15671633183956146,
-0.05287281423807144,
0.09793701767921448,
-0.042132921516895294,
0.012803508900105953,
-0.023692073300480843,
-0.2205476611852646,
-0.07365892827510834,
-0.08585358411073685,
0.026862455531954765,
-0.004772793967276812,
0.22753120958805084,
0.07045473903417587,
0.03557093068957329,
0.05545854941010475,
0.0576339028775692,
-0.003454787889495492,
0.03151566907763481,
0.18229901790618896,
0.0989723727107048,
-0.05670816823840141,
-0.04705695062875748,
0.07534193992614746,
0.028033806011080742,
0.01864645630121231,
0.12431015819311142,
0.07321953028440475,
-0.012309289537370205,
0.07366625964641571,
-0.022612236440181732,
0.04702895134687424,
-0.10678938031196594,
-0.16222508251667023,
-0.03640706464648247,
0.08139578998088837,
0.01754310168325901,
0.06620614230632782,
0.10388407856225967,
-0.02347712777554989,
0.04392615333199501,
-0.0683072879910469,
-0.047119513154029846,
-0.19352172315120697,
-0.08100902289152145,
-0.10630819201469421,
-0.1066746711730957,
0.0025971289724111557,
-0.07839768379926682,
-0.006405447609722614,
0.07019750773906708,
0.04243552312254906,
-0.04944124445319176,
0.05090705305337906,
0.027607275173068047,
-0.058226849883794785,
0.08426842838525772,
-0.0374758280813694,
0.019005026668310165,
-0.01705215685069561,
-0.015734592452645302,
-0.1414513885974884,
-0.025720052421092987,
-0.04888613149523735,
0.03702576458454132,
-0.06654901802539825,
0.007938358001410961,
-0.1370028704404831,
-0.13525722920894623,
-0.022378528490662575,
0.045916568487882614,
-0.05562008172273636,
0.1206224113702774,
0.0006713551701977849,
0.010930516757071018,
0.049446847289800644,
0.2095327526330948,
-0.06220695748925209,
-0.029235756024718285,
-0.04284632205963135,
0.256413072347641,
0.07069311290979385,
0.1109071746468544,
-0.008587480522692204,
0.0032135164365172386,
-0.0791885107755661,
0.3118537962436676,
0.2932111918926239,
-0.04928450286388397,
0.04837615042924881,
0.01816660352051258,
0.03399038687348366,
0.1385296732187271,
0.14495183527469635,
0.08771999180316925,
0.22766800224781036,
-0.07150748372077942,
-0.021090956404805183,
-0.019412636756896973,
-0.015336482785642147,
-0.11512982845306396,
0.06193622201681137,
0.06605460494756699,
-0.04299676790833473,
-0.06651457399129868,
0.09843623638153076,
-0.19360409677028656,
0.13690705597400665,
0.0028531919233500957,
-0.2249731421470642,
-0.07498964667320251,
-0.03641308471560478,
0.14264386892318726,
-0.0050855111330747604,
0.080597423017025,
-0.0021270429715514183,
-0.10633035004138947,
0.020437195897102356,
0.014396386221051216,
-0.2136639505624771,
-0.04259442165493965,
0.07654968649148941,
-0.05085624009370804,
0.014857068657875061,
-0.025858590379357338,
0.028786558657884598,
0.07043676823377609,
0.051624055951833725,
-0.02011995017528534,
0.024657901376485825,
-0.000578725419472903,
-0.04388389736413956,
0.00410191947594285,
0.026280947029590607,
0.001854399568401277,
-0.06594856828451157,
0.0698620155453682,
-0.1482185274362564,
0.0447433777153492,
-0.11199703812599182,
-0.06083088740706444,
-0.007630004547536373,
0.05529272183775902,
-0.049834296107292175,
0.05740377679467201,
0.10056748241186142,
0.005429130047559738,
-0.041779059916734695,
-0.05086144804954529,
-0.03192179277539253,
0.010067981667816639,
-0.11767444759607315,
-0.15252991020679474,
-0.09751367568969727,
-0.094491146504879,
0.09518249332904816,
0.004092264920473099,
-0.16197417676448822,
0.00041187918395735323,
-0.10356339812278748,
0.055325187742710114,
-0.16911841928958893,
0.08710888773202896,
0.04155683144927025,
0.016016297042369843,
-0.014515646733343601,
-0.07223077863454819,
0.057068344205617905,
0.07699248194694519,
-0.12468259036540985,
-0.09319192171096802
] |
null | null | null |
# Text to Speech Model
## Being used for the `Audio Labeler` effect in Audacity
metadata:
```
metadata = {
    'sample_rate': 16000,
    'domain_tags': ['speech'],
    'short_description': 'I will label your speech into text :]',
    'long_description': (
        'This is an Audacity wrapper for the model, '
        'forked from the repository '
        'facebook/s2t-medium-librispeech-asr. '
        'This model was trained by Changhan Wang '
        'and Yun Tang and Xutai Ma and Anne Wu '
        'and Dmytro Okhonko and Juan Pino.'
    ),
    'tags': ['speech-to-text'],
    'effect_type': 'waveform-to-labels',
    'multichannel': False,
    'labels': ["<pad>", "<s>", "</s>", "<unk>", "|", "E", "T", "A", "O", "N", "I", "H", "S", "R", "D", "L", "U", "M", "W", "C", "F", "G", "Y", "P", "B", "V", "K", "'", "X", "J", "Q", "Z"],
}
```
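The `labels` above are the character vocabulary of a CTC acoustic model, so a `waveform-to-labels` effect amounts to running the network over the waveform and turning per-frame predictions into timed character labels. The sketch below only illustrates that idea; the checkpoint name and helper logic are assumptions, not the packaged Audacity wrapper.

```python
# Hypothetical illustration of a waveform-to-labels pass with a CTC model.
# "facebook/wav2vec2-base-960h" is a stand-in checkpoint that shares the
# character vocabulary listed in the metadata above.
import torch
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC

processor = Wav2Vec2Processor.from_pretrained("facebook/wav2vec2-base-960h")
model = Wav2Vec2ForCTC.from_pretrained("facebook/wav2vec2-base-960h")

waveform = torch.zeros(16000)  # one second of 16 kHz audio as a placeholder
inputs = processor(waveform.numpy(), sampling_rate=16000, return_tensors="pt")
with torch.no_grad():
    logits = model(inputs.input_values).logits[0]  # (frames, vocab_size)

frame_ids = logits.argmax(dim=-1)
seconds_per_frame = (waveform.numel() / 16000) / logits.shape[0]
timed_labels = [
    (processor.tokenizer.convert_ids_to_tokens(int(i)), round(t * seconds_per_frame, 3))
    for t, i in enumerate(frame_ids.tolist())
]
print(timed_labels[:5])  # e.g. [('<pad>', 0.0), ...] for silent input
```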
|
{"tags": ["audacity"], "inference": false}
| null |
aguilara42/audacity-Wav2Vec2-Base
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
# Text to Speech Model
## Being used for the 'Audio Labeler' effect in Audacity
metadata:
|
[
"# Text to Speech Model",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nmetadata:"
] |
[
"TAGS\n#audacity #region-us \n",
"# Text to Speech Model",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nmetadata:"
] |
[
10,
5,
20
] |
[
"passage: TAGS\n#audacity #region-us \n# Text to Speech Model## Being used for the 'Audio Labeler' effect in Audacity\n\nmetadata:"
] |
[
-0.032159287482500076,
0.05291532352566719,
-0.00547912809997797,
-0.012529702857136726,
0.13071362674236298,
0.0016076655592769384,
0.2124342918395996,
0.012021099217236042,
0.009365793317556381,
-0.017446111887693405,
0.019379638135433197,
-0.014896025881171227,
-0.010075335390865803,
0.03229719400405884,
-0.07290994375944138,
-0.14937689900398254,
0.044553227722644806,
-0.07193011045455933,
0.1821019947528839,
0.06269270926713943,
0.06444428861141205,
-0.05475393682718277,
0.002829942386597395,
0.005829951725900173,
-0.006270827259868383,
0.054960232228040695,
0.06463272124528885,
-0.07664825767278671,
0.09331115335226059,
-0.02817608043551445,
0.07468833774328232,
0.008188161067664623,
-0.030396437272429466,
-0.2709285616874695,
0.023786714300513268,
-0.06483938544988632,
0.023877132683992386,
-0.031873058527708054,
-0.06468792259693146,
-0.07606910169124603,
-0.035506926476955414,
0.2694067060947418,
0.046582065522670746,
0.03594853729009628,
-0.19407671689987183,
-0.15587358176708221,
0.015031601302325726,
-0.06590414047241211,
-0.024039413779973984,
0.09134665876626968,
-0.0850895494222641,
0.018536537885665894,
0.010637362487614155,
0.04287048429250717,
-0.02166340872645378,
-0.18060772120952606,
0.014001088216900826,
-0.0011453457409515977,
0.0700465589761734,
0.22185876965522766,
-0.11275187879800797,
0.06788815557956696,
0.07701341062784195,
0.017277805134654045,
-0.09841306507587433,
-0.07656507194042206,
0.039723433554172516,
0.018249420449137688,
-0.0636836513876915,
-0.07004822045564651,
0.46573036909103394,
0.03873514384031296,
-0.04794750362634659,
-0.027800017967820168,
-0.005895532667636871,
-0.010182728059589863,
-0.0360184982419014,
-0.012433761730790138,
-0.04605872929096222,
0.11297942698001862,
0.03610647842288017,
0.043541498482227325,
-0.09782526642084122,
0.002563292160630226,
-0.09676976501941681,
0.2831324636936188,
-0.01546375174075365,
0.0650918185710907,
-0.16631755232810974,
-0.08493254333734512,
-0.0543118491768837,
-0.10586762428283691,
0.10715962946414948,
-0.06712218374013901,
0.036346640437841415,
-0.023210519924759865,
0.020892471075057983,
-0.22388823330402374,
0.11880674213171005,
0.09996441006660461,
-0.10548265278339386,
0.01813674159348011,
-0.1478613317012787,
0.1069260835647583,
0.13575558364391327,
0.09365488588809967,
-0.016304483637213707,
-0.0485270619392395,
-0.06979240477085114,
-0.05267219617962837,
0.027571136131882668,
-0.07354427874088287,
-0.09096015244722366,
0.10277191549539566,
-0.017971232533454895,
0.0537305548787117,
-0.04337453097105026,
-0.09927220642566681,
-0.15426033735275269,
-0.010443652048707008,
-0.1116417869925499,
0.0029336109291762114,
-0.047429319471120834,
-0.04387076944112778,
0.06200481578707695,
0.017684422433376312,
-0.09508825093507767,
0.10686872154474258,
0.023842085152864456,
0.15651746094226837,
-0.031233156099915504,
-0.021934613585472107,
0.06930878013372421,
-0.05423858016729355,
-0.008033364079892635,
0.0717904269695282,
0.12353189289569855,
-0.1091819629073143,
0.048979584127664566,
-0.09570662677288055,
0.025420889258384705,
0.03252030164003372,
-0.012333298102021217,
-0.08548005670309067,
0.004149792715907097,
0.08242958784103394,
-0.06946101039648056,
-0.2059430330991745,
-0.13616421818733215,
0.06586380302906036,
-0.023721609264612198,
0.0870545506477356,
-0.18201471865177155,
0.0795280933380127,
-0.10285208374261856,
0.00914688128978014,
-0.029646029695868492,
0.02964252233505249,
-0.0009780362015590072,
0.08822569996118546,
0.014859105460345745,
0.05266841500997543,
-0.14504680037498474,
0.046482354402542114,
-0.03171399235725403,
0.16000919044017792,
-0.15548495948314667,
-0.09414863586425781,
0.1423582285642624,
-0.10455532371997833,
-0.08348642289638519,
0.17798958718776703,
-0.014586140401661396,
0.11142734438180923,
0.14308226108551025,
0.2970397472381592,
-0.04468648508191109,
-0.1839863359928131,
0.051512617617845535,
0.05808999389410019,
-0.03996463865041733,
0.07449797540903091,
0.08839718252420425,
-0.07836667448282242,
-0.09133883565664291,
-0.022944234311580658,
0.29006361961364746,
0.05811311677098274,
-0.06086968258023262,
-0.08381737768650055,
0.045875173062086105,
-0.022694384679198265,
0.03632596880197525,
-0.016624847427010536,
-0.07570980489253998,
-0.050786979496479034,
0.005042946897447109,
-0.09313510358333588,
0.0013131119776517153,
-0.0109312878921628,
0.034686341881752014,
-0.08899357914924622,
0.08128803223371506,
-0.04867897182703018,
0.026382576674222946,
-0.10930808633565903,
0.1603257656097412,
-0.06701717525720596,
0.14279459416866302,
0.10628849267959595,
0.0020922250114381313,
0.0011155635584145784,
-0.09107226878404617,
-0.03982445225119591,
0.008564837276935577,
-0.015100350603461266,
0.04187491536140442,
-0.011113299988210201,
-0.09382617473602295,
0.23312754929065704,
-0.07591483741998672,
0.041581399738788605,
0.07965154200792313,
-0.07603782415390015,
0.08374522626399994,
-0.09757360070943832,
0.10040128231048584,
-0.02103569731116295,
0.04351336136460304,
0.0549660325050354,
0.0016539618372917175,
0.06789466738700867,
0.0099997129291296,
-0.023935643956065178,
-0.10666163265705109,
0.12874914705753326,
-0.17452771961688995,
0.19734475016593933,
0.1429893970489502,
-0.15496911108493805,
0.004916923586279154,
0.020579952746629715,
0.023767048493027687,
-0.03202325850725174,
-0.06970411539077759,
-0.04664456099271774,
0.21336326003074646,
-0.013775989413261414,
0.08316482603549957,
-0.010606054216623306,
0.09252160787582397,
0.021370502188801765,
-0.027032887563109398,
-0.055647097527980804,
0.04164893552660942,
-0.13313131034374237,
0.033405475318431854,
0.05700521916151047,
0.30517879128456116,
0.05501345545053482,
0.25679248571395874,
-0.05075538158416748,
-0.007154252380132675,
0.04612841457128525,
-0.07140698283910751,
-0.06371939182281494,
0.13148906826972961,
-0.20917701721191406,
0.05360528826713562,
0.026491621509194374,
0.06799398362636566,
0.0501730814576149,
-0.059976231306791306,
-0.047437261790037155,
-0.04394502565264702,
-0.024776365607976913,
-0.1456580013036728,
-0.015306344255805016,
-0.034062184393405914,
0.05116391181945801,
0.020518457517027855,
-0.10644209384918213,
0.06322960555553436,
-0.042368821799755096,
-0.0419456847012043,
0.032166481018066406,
-0.16946730017662048,
-0.26712408661842346,
-0.16041940450668335,
-0.0831364244222641,
-0.007792309857904911,
0.06428693979978561,
0.0739341601729393,
-0.07646814733743668,
-0.009552152827382088,
0.030779024586081505,
0.11373861134052277,
-0.04356810078024864,
-0.053275126963853836,
-0.02102566324174404,
-0.004557464737445116,
-0.01811366155743599,
-0.054221827536821365,
-0.001473054988309741,
-0.05413064360618591,
0.12876442074775696,
0.05906665325164795,
-0.07851536571979523,
0.12292679399251938,
0.23745989799499512,
0.08498798310756683,
-0.020393503829836845,
0.0017869907896965742,
0.18739306926727295,
-0.1721750795841217,
-0.1071300059556961,
0.08671882748603821,
-0.16154667735099792,
0.006085169967263937,
0.32872024178504944,
0.03032199665904045,
-0.09837542474269867,
0.04972400516271591,
-0.01900564879179001,
-0.11281478404998779,
-0.1663629114627838,
-0.11870183795690536,
-0.08452077209949493,
0.11888253688812256,
-0.043641287833452225,
0.03870074078440666,
-0.04399288445711136,
-0.007317115552723408,
0.03490496426820755,
-0.08330628275871277,
0.05561713129281998,
0.012446558102965355,
0.21725550293922424,
-0.09482724964618683,
0.05511863902211189,
-0.018410854041576385,
-0.16635549068450928,
0.11069109290838242,
0.1626168191432953,
0.012517085298895836,
0.2541961967945099,
0.06974061578512192,
0.08311212807893753,
-0.11812376976013184,
0.08502443134784698,
0.05478085204958916,
0.053990982472896576,
-0.031658582389354706,
-0.06446395069360733,
-0.06023454666137695,
-0.0023700795136392117,
0.07814116775989532,
0.1087716594338417,
-0.06561785191297531,
-0.002802568953484297,
-0.02477484568953514,
0.06985988467931747,
-0.07987046986818314,
0.19010493159294128,
-0.019688565284013748,
0.030118390917778015,
0.08574127405881882,
-0.029360434040427208,
-0.10307266563177109,
0.2332683801651001,
0.1155669316649437,
-0.001961293863132596,
0.021330153569579124,
0.0655306950211525,
0.11644935607910156,
-0.12864866852760315,
0.06718256324529648,
-0.13228045403957367,
-0.11094168573617935,
-0.05250536650419235,
-0.004675550851970911,
-0.16399888694286346,
0.19286122918128967,
0.009911075234413147,
-0.043093241751194,
0.0167535413056612,
-0.049846891313791275,
0.04552171751856804,
0.0784723311662674,
0.14347676932811737,
0.01013164222240448,
-0.016583917662501335,
-0.011002058163285255,
-0.05675544589757919,
0.01977851800620556,
0.12963788211345673,
0.06273594498634338,
-0.07158301025629044,
-0.012059038504958153,
0.08289308100938797,
0.008078613318502903,
-0.018655959516763687,
-0.1459655463695526,
-0.0776779055595398,
-0.00983572006225586,
0.28577920794487,
0.04988826438784599,
0.02932584658265114,
-0.033063698559999466,
-0.07929601520299911,
-0.09981074184179306,
0.13212448358535767,
-0.017489606514573097,
-0.03446120396256447,
-0.13539470732212067,
0.06853080540895462,
0.024821676313877106,
-0.026605790480971336,
-0.0478534996509552,
0.0023385868407785892,
-0.08627618849277496,
0.0073769488371908665,
0.11656806617975235,
-0.027381639927625656,
0.0981207937002182,
-0.08087293803691864,
0.1687714159488678,
0.029291128739714622,
0.05496589466929436,
0.022531628608703613,
0.024064334109425545,
-0.032580506056547165,
-0.09218335151672363,
0.06943506747484207,
-0.22088679671287537,
-0.1693437099456787,
0.2003563940525055,
0.03350093960762024,
-0.1804540604352951,
-0.09864583611488342,
-0.08477995544672012,
0.18343223631381989,
0.15903930366039276,
0.02224813960492611,
0.18548545241355896,
0.21202898025512695,
-0.06544077396392822,
-0.3110140562057495,
-0.09431836754083633,
-0.11398009955883026,
0.019307676702737808,
0.03474680334329605,
-0.14363405108451843,
0.11012103408575058,
-0.11152219772338867,
-0.06011127308011055,
0.1508464515209198,
-0.16467836499214172,
-0.09938384592533112,
0.2772983908653259,
-0.04367620497941971,
0.3407064378261566,
-0.10715062916278839,
-0.09683065116405487,
-0.13074801862239838,
-0.11600903421640396,
0.006562636699527502,
-0.0999533161520958,
0.11590023338794708,
0.06127859279513359,
0.11083050817251205,
0.03793388605117798,
0.06159200519323349,
0.11200468242168427,
0.14378492534160614,
0.011494869366288185,
-0.011484360322356224,
-0.05829382687807083,
0.08029823005199432,
0.08270364254713058,
-0.06502954661846161,
-0.01903989352285862,
0.0012522891629487276,
-0.08783484995365143,
-0.0455981120467186,
-0.007759617175906897,
-0.0071264952421188354,
0.049481816589832306,
-0.08196763694286346,
-0.04713570326566696,
-0.036943696439266205,
-0.03655340522527695,
0.006366550922393799,
0.2076186239719391,
-0.15304556488990784,
0.07274512201547623,
0.09966029971837997,
0.13175854086875916,
-0.18293240666389465,
0.10459160804748535,
0.007813984528183937,
-0.07926338911056519,
0.0895557552576065,
-0.10896579176187515,
0.09468622505664825,
0.08790294826030731,
-0.0037920798640698195,
0.11998365074396133,
0.06627339869737625,
-0.014481456950306892,
0.10166287422180176,
0.1414673775434494,
-0.1350570023059845,
-0.052839480340480804,
-0.0596768781542778,
-0.07174953073263168,
0.05055660009384155,
-0.06431212276220322,
0.15666067600250244,
0.056773591786623,
0.05175347998738289,
-0.014382761903107166,
-0.009354027919471264,
-0.1167992353439331,
0.08218471705913544,
0.011840611696243286,
0.01948201283812523,
-0.08754440397024155,
0.13889141380786896,
0.001529265777207911,
-0.12754924595355988,
-0.024215662851929665,
-0.03967978432774544,
0.014386111870408058,
-0.019288575276732445,
-0.23297032713890076,
0.1509493589401245,
-0.0748530849814415,
-0.06574825942516327,
-0.026325484737753868,
-0.1282154619693756,
-0.02302142232656479,
-0.013666900806128979,
0.03143847733736038,
0.0984523594379425,
-0.08793234825134277,
-0.05245497077703476,
0.021742239594459534,
0.05498240143060684,
-0.0323520302772522,
-0.06431901454925537,
-0.10871734470129013,
-0.1874810755252838,
-0.03974509984254837,
0.07068060338497162,
-0.08365841209888458,
-0.12174759060144424,
-0.13491931557655334,
0.11383970826864243,
-0.13917067646980286,
0.03058353066444397,
0.05645815283060074,
0.011692233383655548,
0.03555211424827576,
0.006423648446798325,
-0.07577814161777496,
-0.056707970798015594,
-0.12266149371862411,
0.030796702951192856,
0.06187117099761963,
0.05317080020904541,
-0.02000219002366066,
0.0010708555346354842,
0.0942324846982956,
0.010248062200844288,
0.13022898137569427,
0.08868380635976791,
-0.13907203078269958,
0.07580665498971939,
-0.36508554220199585,
-0.048082493245601654,
0.10675011575222015,
-0.04306062310934067,
-0.06597135961055756,
0.014611276797950268,
-0.027859127148985863,
0.060365624725818634,
-0.020514437928795815,
0.016067439690232277,
-0.02401399239897728,
-0.07114654779434204,
-0.06393368542194366,
-0.07820369303226471,
-0.15585118532180786,
0.0017158566042780876,
-0.07014743983745575,
0.06865331530570984,
0.033804167062044144,
0.11295519024133682,
-0.042176373302936554,
0.027563896030187607,
0.05947582796216011,
0.04958691447973251,
0.03784012049436569,
-0.10171904414892197,
-0.11640872806310654,
-0.08419996500015259,
-0.025862429291009903,
-0.04349952191114426,
0.2887563705444336,
0.003893469925969839,
0.002809311030432582,
0.038262031972408295,
0.09008945524692535,
-0.11423518508672714,
-0.0064889490604400635,
0.24845346808433533,
0.12019079923629761,
-0.09315086901187897,
-0.12354306876659393,
-0.013987207785248756,
0.027096793055534363,
0.13966825604438782,
-0.034748680889606476,
0.08260713517665863,
0.08005015552043915,
0.03643602505326271,
0.05581047385931015,
0.11435610055923462,
0.03348466381430626,
0.013094651512801647,
0.056529391556978226,
0.052504099905490875,
0.01933867856860161,
-0.008638345636427402,
0.08346535265445709,
0.01325785368680954,
0.0702558159828186,
-0.11109395325183868,
-0.03606848791241646,
-0.154701828956604,
0.09956175833940506,
-0.03917502239346504,
-0.11829525232315063,
0.051620252430438995,
-0.1139768734574318,
-0.014947960153222084,
0.07256577908992767,
0.002559895860031247,
-0.0615975446999073,
0.12242545932531357,
-0.19598811864852905,
-0.1174805760383606,
0.145709827542305,
-0.07642930001020432,
0.04041333124041557,
-0.0037274821661412716,
-0.05128965899348259,
0.05942513793706894,
-0.0032276324927806854,
-0.001460560830309987,
0.01813412643969059,
-0.02413937821984291,
-0.0406067930161953,
-0.24629738926887512,
-0.08828435093164444,
-0.0056678494438529015,
0.05421185493469238,
0.021998729556798935,
0.04933533817529678,
0.0647541955113411,
-0.02143416926264763,
0.0582834854722023,
0.18559421598911285,
-0.006566247437149286,
-0.08449877798557281,
-0.11217375099658966,
0.01182799506932497,
-0.04055169224739075,
0.13957662880420685,
-0.1008836030960083,
-0.04773413762450218,
-0.06231015920639038,
0.07940448820590973,
0.2183203250169754,
-0.1723977029323578,
-0.005740466993302107,
0.03118075430393219,
0.05813515931367874,
0.03385520726442337,
0.04473905637860298,
0.047569118440151215,
0.1237165555357933,
0.017587076872587204,
0.021138887852430344,
-0.1003667339682579,
-0.06587110459804535,
0.057530712336301804,
-0.003356450702995062,
0.06600143015384674,
-0.031115656718611717,
-0.09232188761234283,
0.22702515125274658,
-0.29215896129608154,
0.050236769020557404,
-0.08445757627487183,
-0.13474704325199127,
-0.06668882071971893,
-0.06325376778841019,
0.0469292588531971,
0.10524735599756241,
0.01717456802725792,
-0.028541624546051025,
-0.1022113710641861,
-0.13025490939617157,
0.012780563905835152,
-0.09955018758773804,
0.023647215217351913,
-0.019413502886891365,
-0.0894784927368164,
-0.008676232770085335,
-0.01180372852832079,
0.040745045989751816,
0.02663281559944153,
0.12564322352409363,
0.11388154327869415,
0.16069801151752472,
0.0072143180295825005,
-0.00039923525764606893,
-0.06387959420681,
0.12366218864917755,
-0.013975093141198158,
0.05252593755722046,
0.08744056522846222,
0.03518523648381233,
0.013046865351498127,
-0.0979919582605362,
-0.07783383131027222,
0.003541984362527728,
0.031481124460697174,
-0.09384171664714813,
0.10999564081430435,
-0.026190489530563354,
0.051885657012462616,
-0.03647006303071976,
0.010038333013653755,
0.012756163254380226,
-0.019069144502282143,
-0.12876498699188232,
-0.026144549250602722,
-0.014516361057758331,
-0.0746421366930008,
0.11018963158130646,
-0.09261544048786163,
-0.36639219522476196,
0.07646175473928452,
-0.06481839716434479,
0.1306096762418747,
-0.024608254432678223,
0.0667654424905777,
0.08606778085231781,
0.03223246708512306,
-0.006500975228846073,
-0.17327971756458282,
0.11905751377344131,
0.07340704649686813,
-0.014408880844712257,
-0.07029905170202255
] |
null | null | null |
# Labeler With Timestamps
## Being used for the `Audio Labeler` effect in Audacity
This is an audio labeler model which is used in Audacity's labeler effect.
metadata:
```
{
"sample_rate": 48000,
"domain_tags": ["Music"],
"tags": ["Audio Labeler"],
"effect_type": "waveform-to-labels",
"multichannel": false,
"labels": ["Acoustic Guitar", "Auxiliary Percussion", "Brass", "Clean Electric Guitar", "Distorted Electric Guitar", "Double Bass", "Drum Set", "Electric Bass", "Flute", "piano", "Reeds", "Saxophone", "Strings", "Trumpet", "Voice"],
"short_description": "Use me to label some instruments!",
"long_description": "An audio labeler, which outputs label predictions and time ranges for the labels. This model can label various instruments listed in the labels section."
}
```
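For a `waveform-to-labels` effect, Audacity expects the model to return label ids together with the start and end time of each labeled region. The sketch below only illustrates that output contract with placeholder inference; the function name and chunking scheme are assumptions, not the packaged model code.

```python
# Hedged sketch of the labels-plus-timestamps output a waveform-to-labels
# effect produces. The constant-label "inference" is a placeholder; the real
# model predicts one of the instrument classes listed in the metadata.
import torch

LABELS = [
    "Acoustic Guitar", "Auxiliary Percussion", "Brass", "Clean Electric Guitar",
    "Distorted Electric Guitar", "Double Bass", "Drum Set", "Electric Bass",
    "Flute", "piano", "Reeds", "Saxophone", "Strings", "Trumpet", "Voice",
]

def label_waveform(waveform: torch.Tensor, sample_rate: int = 48000, hop_seconds: float = 1.0):
    """Chunk a mono waveform and emit (label_ids, [start, end] times in seconds)."""
    hop = int(sample_rate * hop_seconds)
    label_ids, timestamps = [], []
    for start in range(0, waveform.numel(), hop):
        chunk = waveform[start:start + hop]
        label_ids.append(0)  # placeholder: run the real classifier on `chunk` here
        timestamps.append([start / sample_rate, (start + chunk.numel()) / sample_rate])
    return torch.tensor(label_ids), torch.tensor(timestamps)

ids, times = label_waveform(torch.zeros(3 * 48000))
print([(LABELS[i], times[n].tolist()) for n, i in enumerate(ids.tolist())])
```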
|
{"tags": ["audacity"], "inference": false}
| null |
aguilara42/openl3-labeler-w-timestamps
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
# Labeler With Timestamps
## Being used for the 'Audio Labeler' effect in Audacity
This is a audio labeler model which is used in Audacity's labeler effect.
metadata:
|
[
"# Labeler With Timestamps",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
"TAGS\n#audacity #region-us \n",
"# Labeler With Timestamps",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
10,
7,
40
] |
[
"passage: TAGS\n#audacity #region-us \n# Labeler With Timestamps## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
-0.021484505385160446,
-0.0006438632844947278,
-0.005067161750048399,
0.018337098881602287,
0.05285165458917618,
0.051790207624435425,
0.22809752821922302,
-0.02128700539469719,
0.07179230451583862,
0.04418132081627846,
0.00492812180891633,
-0.04134761542081833,
0.0004073689051438123,
0.014410305768251419,
-0.05807766690850258,
-0.11423502117395401,
0.05318465456366539,
0.0027831371407955885,
0.14243218302726746,
0.10906808078289032,
0.037421125918626785,
-0.03748791292309761,
0.015337055549025536,
0.02590627409517765,
-0.07439304888248444,
-0.01827181689441204,
0.07663801312446594,
-0.1162421703338623,
0.10875940322875977,
-0.024547215551137924,
0.1139574870467186,
0.011637337505817413,
0.0004541972593870014,
-0.14214883744716644,
0.013121322728693485,
0.015870077535510063,
0.024494854733347893,
-0.02153090201318264,
-0.023856857791543007,
0.009746860712766647,
-0.083816297352314,
0.23894289135932922,
0.0664057582616806,
-0.00969631690531969,
-0.20188815891742706,
-0.22404679656028748,
-0.09084179252386093,
-0.10739291459321976,
0.004756412468850613,
0.0490371398627758,
-0.0543861947953701,
0.09276431053876877,
-0.043161772191524506,
0.031687818467617035,
0.08228517323732376,
-0.2126762717962265,
0.01638895832002163,
0.07198253273963928,
0.1466739922761917,
0.18836431205272675,
-0.05098732188344002,
0.08834094554185867,
0.08505135029554367,
-0.014379620552062988,
-0.016056768596172333,
-0.08335728943347931,
0.13410186767578125,
-0.011556998826563358,
-0.09106345474720001,
-0.05527634918689728,
0.5256308913230896,
0.008073483593761921,
-0.07996555417776108,
0.01653292588889599,
-0.03708580136299133,
0.009111994877457619,
-0.057205505669116974,
-0.07035591453313828,
0.014127888716757298,
0.0312060434371233,
0.09739303588867188,
0.12827852368354797,
-0.06300453841686249,
-0.004282607231289148,
-0.0353289432823658,
0.3585406541824341,
-0.015461822971701622,
0.06933444738388062,
-0.14780087769031525,
-0.11841633915901184,
-0.00484044011682272,
-0.06330302357673645,
0.06329280138015747,
-0.01707923226058483,
-0.015314407646656036,
-0.07428644597530365,
-0.030275540426373482,
-0.19959770143032074,
0.0239147637039423,
0.1922265887260437,
-0.10960890352725983,
0.01757093146443367,
-0.11754702031612396,
0.10381396859884262,
0.1550574153661728,
0.1755087971687317,
0.01399944443255663,
-0.09360602498054504,
-0.03711871802806854,
-0.05178084596991539,
0.018119335174560547,
-0.07467194646596909,
-0.1203484907746315,
0.003606916405260563,
0.08772862702608109,
-0.005448746029287577,
-0.12272675335407257,
-0.10438061505556107,
-0.14663578569889069,
-0.0401589497923851,
0.0037264458369463682,
-0.03471369668841362,
0.015775229781866074,
-0.07291935384273529,
0.06292800605297089,
0.05641881376504898,
-0.028370194137096405,
0.061318203806877136,
0.09652682393789291,
0.2034740447998047,
-0.06862051784992218,
-0.04358157142996788,
0.013286505825817585,
-0.0944499522447586,
-0.007912985980510712,
0.05450157821178436,
0.1130652129650116,
-0.06443953514099121,
0.012059970758855343,
-0.08191393315792084,
0.03745824471116066,
0.07213488221168518,
-0.05025411397218704,
0.00242071645334363,
0.060772620141506195,
0.09258454293012619,
-0.024805467575788498,
-0.17067119479179382,
-0.1252039074897766,
0.03164743632078171,
-0.08342541754245758,
0.15662729740142822,
-0.11623119562864304,
0.09062706679105759,
-0.0650658905506134,
0.04718049243092537,
-0.1104879230260849,
-0.055287592113018036,
-0.014869136735796928,
0.004122885875403881,
-0.002884294604882598,
0.017479922622442245,
-0.08286096900701523,
0.08338399976491928,
-0.04309766739606857,
0.05989504233002663,
-0.19367079436779022,
-0.1260310709476471,
0.17534612119197845,
-0.14640302956104279,
-0.045514341443777084,
0.12003584951162338,
0.002150110434740782,
0.08241105824708939,
0.09046270698308945,
0.09736472368240356,
0.058002058416604996,
-0.19176232814788818,
0.09319666028022766,
-0.01988665759563446,
-0.043303538113832474,
0.0070592244155704975,
0.08272061496973038,
-0.02439957857131958,
-0.04209648445248604,
-0.025481626391410828,
0.2435605823993683,
0.036264386028051376,
-0.06420319527387619,
-0.042213212698698044,
0.04024505615234375,
0.015946930274367332,
0.07790469378232956,
-0.01035181526094675,
-0.04128796607255936,
-0.015576517209410667,
-0.010334626771509647,
0.0368223637342453,
-0.046076949685811996,
0.13618651032447815,
0.053620100021362305,
-0.04193887859582901,
0.051555268466472626,
0.01616564206779003,
0.011450044810771942,
-0.1377691924571991,
0.11049740016460419,
0.02333139069378376,
0.09019296616315842,
0.057246554642915726,
-0.0050320872105658054,
-0.004398754332214594,
-0.11768411099910736,
-0.008046472445130348,
0.03624694421887398,
-0.03705732524394989,
0.0075672161765396595,
0.005749550648033619,
-0.04319573938846588,
0.19982707500457764,
-0.05833710357546806,
-0.016767514869570732,
0.14189119637012482,
-0.06242610514163971,
0.094996377825737,
-0.1271081119775772,
0.09056445956230164,
-0.027975162491202354,
0.008967814967036247,
0.09832466393709183,
-0.031396448612213135,
0.08509203791618347,
-0.004561265464872122,
-0.0584564208984375,
-0.05204380303621292,
0.14304734766483307,
-0.14863286912441254,
0.13474692404270172,
0.09811428189277649,
0.02335822395980358,
-0.11332233995199203,
0.06877093762159348,
0.0314069464802742,
-0.04998651146888733,
-0.04519324004650116,
-0.023626601323485374,
0.1721908301115036,
0.015140045434236526,
0.04786427691578865,
-0.0074551720172166824,
0.09076029807329178,
0.04407624900341034,
-0.012242430821061134,
-0.05969199910759926,
-0.047708459198474884,
-0.12668460607528687,
0.10573811829090118,
0.0010336339473724365,
0.350055068731308,
0.05134793370962143,
0.17534898221492767,
-0.04449750855565071,
-0.05920090898871422,
0.012056415900588036,
-0.12444210052490234,
-0.004740368574857712,
0.10270487517118454,
-0.11464513093233109,
0.024277957156300545,
0.03619319573044777,
0.08212264627218246,
-0.06898751109838486,
-0.09334809333086014,
0.009126500226557255,
-0.019483700394630432,
0.05287101864814758,
-0.05913432314991951,
0.022474166005849838,
-0.01992453634738922,
0.03406579792499542,
0.08350486308336258,
-0.14223431050777435,
0.05789046734571457,
-0.014491894282400608,
-0.012532847933471203,
0.10257723927497864,
-0.14472141861915588,
-0.2891426980495453,
-0.19624750316143036,
-0.08518832176923752,
0.034790050238370895,
0.028780968859791756,
0.07209110260009766,
0.036252766847610474,
-0.011335141025483608,
0.07293584197759628,
0.16482123732566833,
-0.005761728622019291,
-0.028893698006868362,
0.0007436582236550748,
-0.013605151325464249,
-0.058115795254707336,
-0.0649532824754715,
0.026550473645329475,
-0.0562545508146286,
0.07952843606472015,
0.09646628051996231,
-0.05103760212659836,
0.1036817654967308,
0.2728244960308075,
0.05923086032271385,
-0.025361625477671623,
0.03125392645597458,
0.17647133767604828,
-0.19338639080524445,
0.006980955135077238,
0.12757015228271484,
-0.12372341752052307,
0.01010411698371172,
0.2134244292974472,
0.03452664986252785,
-0.08430557698011398,
0.01494095753878355,
0.025962108746170998,
-0.15896068513393402,
-0.08445219695568085,
-0.1227358803153038,
-0.10085370391607285,
0.05185725539922714,
-0.03441533073782921,
0.018296627327799797,
-0.033048637211322784,
-0.014218373224139214,
0.08099888265132904,
-0.0143593680113554,
0.024497278034687042,
-0.05483734980225563,
0.15062864124774933,
-0.09468375146389008,
0.0772961750626564,
0.020381903275847435,
-0.09199268370866776,
0.10506660491228104,
0.20482592284679413,
0.10585767030715942,
0.21446315944194794,
-0.0016521638026461005,
0.11428239941596985,
-0.15044303238391876,
0.14183612167835236,
0.04478134587407112,
0.05431683734059334,
-0.026906060054898262,
-0.0862727239727974,
-0.047132477164268494,
0.012507380917668343,
0.06577476114034653,
-0.026141580194234848,
-0.10212116688489914,
0.034652505069971085,
0.008729012683033943,
-0.017680905759334564,
0.0036890241317451,
0.15594594180583954,
-0.08333175629377365,
0.030340585857629776,
0.04300851747393608,
0.04005967080593109,
-0.10001058131456375,
0.21149621903896332,
-0.02823803387582302,
-0.03222974017262459,
-0.003745042020455003,
0.04110971838235855,
0.08343156427145004,
-0.14952799677848816,
-0.0018625571392476559,
-0.025877583771944046,
-0.09708667546510696,
-0.06843140721321106,
-0.0680423304438591,
-0.08458024263381958,
0.1322202980518341,
-0.009345171973109245,
-0.053615421056747437,
0.06041434779763222,
-0.01025574654340744,
0.0110896285623312,
0.17080256342887878,
0.09851231426000595,
0.034868720918893814,
-0.10945840179920197,
0.014785755425691605,
-0.014660876244306564,
-0.044587261974811554,
0.09216324239969254,
0.11667495965957642,
-0.09419670701026917,
-0.005596071947365999,
0.06717099249362946,
0.04681786522269249,
0.0641000047326088,
-0.09002681076526642,
-0.016234586015343666,
0.04291970655322075,
0.2629513144493103,
-0.007236740086227655,
-0.004813991021364927,
-0.05849563330411911,
-0.18254528939723969,
-0.16057088971138,
0.2357768714427948,
0.00551609368994832,
-0.049157872796058655,
-0.24055224657058716,
0.12458933144807816,
0.03016638569533825,
0.06019039452075958,
-0.05400712788105011,
0.05368025228381157,
-0.052789971232414246,
0.012589472346007824,
0.11660688370466232,
0.0023232242092490196,
0.0999569445848465,
-0.09381167590618134,
0.13038797676563263,
0.01847541518509388,
0.012302043847739697,
-0.020896652713418007,
0.02948508970439434,
0.004276649560779333,
-0.06764915585517883,
0.08325237035751343,
-0.26703161001205444,
-0.18711677193641663,
0.14760367572307587,
-0.017820335924625397,
-0.19044965505599976,
0.03098556026816368,
-0.05834845453500748,
0.05085662007331848,
0.1423172652721405,
0.009495090693235397,
0.20603500306606293,
0.1399824172258377,
-0.025159304961562157,
-0.33388087153434753,
-0.05143028870224953,
-0.045972324907779694,
0.015732472762465477,
0.04291923716664314,
-0.16003818809986115,
0.12336684763431549,
0.016799015924334526,
-0.05440633371472359,
0.23861414194107056,
-0.23513120412826538,
-0.12660576403141022,
0.2122175395488739,
-0.06457522511482239,
0.2644694745540619,
-0.06212148070335388,
-0.04672051966190338,
-0.03514842316508293,
-0.1799527257680893,
0.012854089960455894,
-0.003894312772899866,
0.06716112047433853,
0.00604367908090353,
0.10770870000123978,
0.013084123842418194,
0.05448165535926819,
0.03274652361869812,
0.15878601372241974,
0.03369911387562752,
0.018853850662708282,
-0.13668116927146912,
0.18280509114265442,
0.05741563439369202,
-0.10356690734624863,
0.14527180790901184,
-0.008830551989376545,
-0.035573579370975494,
-0.030047202482819557,
-0.010649016126990318,
-0.008924041874706745,
-0.009016573429107666,
-0.06948252022266388,
-0.0672823116183281,
0.015485942363739014,
-0.08117838948965073,
-0.048946596682071686,
0.20216596126556396,
-0.05159614235162735,
0.011027611792087555,
0.12403208017349243,
-0.011435006745159626,
-0.26176539063453674,
0.028023652732372284,
-0.011821278370916843,
-0.058301571756601334,
0.14020512998104095,
-0.09313308447599411,
0.11063060909509659,
0.04986250400543213,
0.03896137326955795,
0.07439582049846649,
0.07209175825119019,
-0.009818561375141144,
0.08371114730834961,
0.09869398921728134,
-0.09444840252399445,
-0.027821596711874008,
0.0011558390688151121,
-0.13931778073310852,
0.03972447291016579,
-0.08768053352832794,
0.10970877856016159,
0.06300700455904007,
0.014435716904699802,
0.02541528269648552,
-0.026219038292765617,
-0.051854196935892105,
0.03441007807850838,
-0.06393726170063019,
0.033102672547101974,
-0.05841590091586113,
0.11295750737190247,
0.0365445539355278,
-0.06363508850336075,
-0.06558698415756226,
-0.0705740749835968,
-0.037514086812734604,
0.019596023485064507,
-0.14307817816734314,
0.08929405361413956,
-0.23767414689064026,
-0.0584879145026207,
0.04094400256872177,
-0.19466911256313324,
0.0029014800675213337,
0.024716651067137718,
0.03797433525323868,
0.11869176477193832,
-0.018185090273618698,
-0.059497933834791183,
0.010908928699791431,
0.06400460004806519,
-0.1176125556230545,
-0.003620689967647195,
-0.1001986414194107,
-0.14130334556102753,
-0.05868416652083397,
0.029526878148317337,
-0.10568644851446152,
-0.11890694499015808,
-0.14850857853889465,
0.1059892326593399,
-0.08497946709394455,
0.03201470524072647,
0.08367466181516647,
-0.00280367280356586,
0.02105284482240677,
0.021649803966283798,
-0.05931747704744339,
-0.025826692581176758,
-0.06417136639356613,
0.025843366980552673,
0.008523713797330856,
0.07865966856479645,
0.0352700799703598,
-0.04719173535704613,
0.09318393468856812,
-0.002589520998299122,
0.07731382548809052,
0.010508579201996326,
-0.059428587555885315,
0.010786592960357666,
-0.2688494026660919,
-0.0012296898057684302,
0.08465216308832169,
-0.01340681966394186,
-0.015460592694580555,
-0.035417817533016205,
-0.022898469120264053,
-0.009045785292983055,
-0.0924508348107338,
0.022860487923026085,
-0.16531339287757874,
-0.04812479764223099,
-0.03044496849179268,
-0.1157197654247284,
-0.1271408200263977,
-0.004615178797394037,
-0.04251336678862572,
0.10755633562803268,
0.062401071190834045,
0.039347585290670395,
-0.012389088049530983,
-0.0043200175277888775,
0.06835009902715683,
0.018591677770018578,
0.032895367592573166,
-0.12574823200702667,
-0.11910491436719894,
-0.04357936233282089,
-0.046493496745824814,
-0.00045691151171922684,
0.29997023940086365,
0.05617842078208923,
0.05909380689263344,
0.040856510400772095,
0.11521424353122711,
-0.18266475200653076,
-0.0018162823980674148,
0.17294585704803467,
0.07575566321611404,
-0.05981313809752464,
-0.05247589573264122,
0.03960612416267395,
0.016241108998656273,
0.006442406680434942,
0.02243826724588871,
0.03410492464900017,
0.06481055915355682,
0.020537616685032845,
-0.019840165972709656,
0.038949351757764816,
-0.08548671007156372,
0.013296794146299362,
-0.0007909836713224649,
0.08623629063367844,
0.07151569426059723,
0.007140942383557558,
0.0016894114669412374,
0.0022441933397203684,
0.13880868256092072,
-0.09242458641529083,
0.0023987730965018272,
-0.09070748090744019,
0.22647464275360107,
-0.04325670748949051,
-0.12169764190912247,
0.038826365023851395,
-0.10475980490446091,
0.006399885285645723,
0.021193863824009895,
-0.041773345321416855,
0.001963880844414234,
0.13854971528053284,
-0.319352388381958,
-0.01908038556575775,
0.10468185693025589,
-0.05477023124694824,
-0.026111433282494545,
0.01109049841761589,
-0.06028540059924126,
0.008812285028398037,
-0.03588986396789551,
-0.011119775474071503,
-0.06035342067480087,
-0.006628931500017643,
-0.03466998413205147,
-0.211971715092659,
-0.041656192392110825,
0.009380905888974667,
0.016507409512996674,
0.04839109256863594,
-0.14962239563465118,
0.036059118807315826,
-0.015684083104133606,
0.016732703894376755,
0.12131481617689133,
0.040341634303331375,
-0.04087461903691292,
-0.0879555270075798,
-0.04029614105820656,
-0.016562217846512794,
0.1649085283279419,
-0.06949940323829651,
-0.021617960184812546,
-0.06810110062360764,
0.05687868595123291,
0.05716446042060852,
-0.09010352939367294,
-0.04603949189186096,
0.05267687141895294,
0.048909034579992294,
-0.002316117286682129,
0.11230386048555374,
0.005894436035305262,
0.19003604352474213,
0.048014890402555466,
-0.06532235443592072,
-0.12203668802976608,
-0.016009889543056488,
-0.019967423751950264,
-0.026607157662510872,
0.12088795751333237,
-0.015542862936854362,
-0.0735861286520958,
0.164265975356102,
-0.23197025060653687,
-0.03211001306772232,
0.05905170366168022,
-0.1742759644985199,
-0.061509229242801666,
-0.09256364405155182,
0.02207442931830883,
0.0020379936322569847,
0.06619436293840408,
-0.050982385873794556,
-0.12229273468255997,
-0.1578919142484665,
-0.010168825276196003,
-0.01863432116806507,
0.01847742684185505,
-0.026376839727163315,
-0.017067529261112213,
0.11589868366718292,
-0.01960325613617897,
0.03148339316248894,
-0.0004135226190555841,
0.08781499415636063,
0.07510780543088913,
0.06403015553951263,
-0.05204116180539131,
-0.05012733116745949,
-0.1063343957066536,
0.07812801003456116,
-0.04503290727734566,
0.046771563589572906,
0.03632231801748276,
0.06484950333833694,
0.013368056155741215,
-0.10554923862218857,
-0.06601636856794357,
0.017392858862876892,
0.017682379111647606,
-0.05966072529554367,
0.11206098645925522,
-0.04563582316040993,
0.06506180763244629,
-0.051277030259370804,
-0.007694774307310581,
0.012419302016496658,
0.033992357552051544,
0.011566379107534885,
-0.02209188975393772,
0.02886912412941456,
-0.03064168244600296,
0.0551360547542572,
-0.12101724743843079,
-0.2759518325328827,
0.017512356862425804,
-0.047146447002887726,
0.10899578779935837,
-0.02772471494972706,
0.019445661455392838,
0.07968158274888992,
0.02958609163761139,
0.01568187028169632,
-0.2527823746204376,
0.08299526572227478,
0.07532165944576263,
-0.03713420778512955,
-0.07192482799291611
] |
null | null |
transformers
|
Hello World!
|
{}
| null |
ahanadeb/wav2vec2-large-indian-instrument-classification-v1
|
[
"transformers",
"pytorch",
"wav2vec2",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #wav2vec2 #endpoints_compatible #region-us
|
Hello World!
|
[] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #endpoints_compatible #region-us \n"
] |
[
27
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #endpoints_compatible #region-us \n"
] |
[
-0.056049708276987076,
-0.023880459368228912,
-0.009941229596734047,
-0.045777201652526855,
0.1026921197772026,
0.025796571746468544,
0.04540160298347473,
0.06409712880849838,
0.0667881891131401,
-0.026479115709662437,
0.13335591554641724,
0.1894386112689972,
-0.015913376584649086,
0.011616523377597332,
-0.05476301163434982,
-0.20311494171619415,
0.09227653592824936,
0.10180522501468658,
-0.07965491712093353,
0.09707731008529663,
0.04666462168097496,
-0.09196235984563828,
0.054953593760728836,
-0.004915427882224321,
-0.14264844357967377,
0.034000836312770844,
0.02304997853934765,
-0.06704047322273254,
0.12624284625053406,
0.03716568648815155,
0.2049930840730667,
0.02752644754946232,
-0.08141659945249557,
-0.18141327798366547,
0.017582967877388,
0.02813860960304737,
-0.07665177434682846,
0.04707350581884384,
0.1055818721652031,
-0.05399071052670479,
-0.011673579923808575,
0.03258862718939781,
-0.02726084552705288,
0.04478646069765091,
-0.134717658162117,
-0.1584613025188446,
-0.038869865238666534,
0.0597311295568943,
0.04022585600614548,
0.06905175745487213,
0.055634379386901855,
0.19678020477294922,
-0.1349053978919983,
0.09183314442634583,
0.187090665102005,
-0.3026168644428253,
0.027303993701934814,
0.09532101452350616,
0.05107829347252846,
0.04260595142841339,
0.008582904003560543,
0.05086372792720795,
0.004927795846015215,
0.0217070821672678,
-0.03534238785505295,
-0.05335351079702377,
-0.14429178833961487,
0.08312772214412689,
-0.08787330240011215,
-0.11937636137008667,
0.1624542623758316,
-0.02576322853565216,
0.06194527819752693,
0.0646606907248497,
-0.14801043272018433,
-0.06030409410595894,
0.01739170029759407,
0.004965967498719692,
-0.018744034692645073,
0.06848488003015518,
0.02942117489874363,
-0.03313097357749939,
-0.13676786422729492,
-0.022988544777035713,
-0.21222689747810364,
0.2894209623336792,
0.01642712764441967,
0.13399071991443634,
-0.24227465689182281,
0.0746312290430069,
-0.027374640107154846,
-0.0891118198633194,
0.029704848304390907,
-0.09199165552854538,
0.019396988674998283,
0.03747553378343582,
-0.08124855905771255,
0.023289361968636513,
0.06403994560241699,
0.06272052973508835,
-0.011978020891547203,
0.020777050405740738,
0.0181735772639513,
0.11908363550901413,
-0.006985166575759649,
0.04781866446137428,
-0.028468048200011253,
0.028074961155653,
0.028511393815279007,
-0.19456346333026886,
0.02460748888552189,
-0.050196632742881775,
-0.09389146417379379,
-0.12350776046514511,
0.006122938822954893,
0.11189384013414383,
0.04451106861233711,
-0.011834569275379181,
-0.08624555170536041,
-0.005899661220610142,
0.08410146832466125,
-0.04445348680019379,
-0.0003441265143919736,
0.03805895149707794,
0.04535682126879692,
0.24179796874523163,
0.011243296787142754,
-0.0008604824542999268,
-0.0535254031419754,
0.09389353543519974,
-0.05547023564577103,
-0.0037952314596623182,
-0.03000624291598797,
-0.03576141223311424,
0.07675288617610931,
-0.1271163523197174,
0.044096991419792175,
-0.16156034171581268,
-0.0739792212843895,
0.05194997414946556,
0.03675982728600502,
0.03351656720042229,
0.01589188352227211,
0.03779645636677742,
-0.0349208228290081,
0.001338578062132001,
-0.07298662513494492,
-0.008883634582161903,
-0.03511681780219078,
0.0773068442940712,
-0.011502469889819622,
0.08074912428855896,
-0.14807860553264618,
0.04276278614997864,
-0.06101793795824051,
0.0415109284222126,
-0.048949774354696274,
-0.05319468304514885,
-0.03180956840515137,
0.13780301809310913,
-0.02540525421500206,
-0.06098378077149391,
-0.1185903400182724,
0.016402065753936768,
-0.006016947794705629,
0.12516267597675323,
-0.06553404778242111,
-0.1071026399731636,
0.27477890253067017,
-0.13063976168632507,
-0.16843122243881226,
0.055448777973651886,
0.04089845344424248,
-0.06753213703632355,
0.030053826048970222,
0.1237545758485794,
0.04432718828320503,
-0.10116013884544373,
0.07209847122430801,
0.1396065354347229,
-0.17409054934978485,
-0.1816788911819458,
0.04443104937672615,
-0.02802370861172676,
-0.08845988661050797,
0.016420045867562294,
-0.0032956283539533615,
0.08590853959321976,
-0.09417451173067093,
-0.03845597058534622,
-0.060156092047691345,
-0.03476392477750778,
0.07557497918605804,
0.018837669864296913,
0.06760556250810623,
-0.035364579409360886,
0.023285405710339546,
0.018543632701039314,
0.03321564197540283,
0.011096838861703873,
0.08162946254014969,
-0.04968946799635887,
0.10701564699411392,
-0.09145887196063995,
0.013134950771927834,
-0.1666003167629242,
-0.07219857722520828,
-0.015146346762776375,
0.06141708791255951,
-0.08290433883666992,
0.19092606008052826,
0.11371037364006042,
-0.050705473870038986,
0.02678750641644001,
-0.04571988061070442,
0.04906075820326805,
0.0395919494330883,
-0.019834063947200775,
-0.05404963344335556,
-0.019205855205655098,
-0.06710115820169449,
-0.08350396156311035,
-0.0033999152947217226,
-0.010184470564126968,
0.13569872081279755,
0.16477012634277344,
0.010604782961308956,
0.04088578745722771,
-0.01959436573088169,
0.047963086515665054,
-0.026396263390779495,
0.014821557328104973,
0.09677062928676605,
-0.032833706587553024,
-0.06940294802188873,
0.18792815506458282,
-0.053749777376651764,
0.33313921093940735,
0.20847292244434357,
-0.32604095339775085,
0.07709469646215439,
0.030369620770215988,
-0.02151068113744259,
0.011022777296602726,
0.0721905454993248,
-0.02977682091295719,
0.04454423487186432,
0.04792125150561333,
0.11306259781122208,
-0.03328302130103111,
-0.01315088476985693,
0.03165321424603462,
-0.06337528675794601,
-0.027283478528261185,
0.061659831553697586,
0.07240104675292969,
-0.10767564922571182,
0.14440296590328217,
0.2860623300075531,
0.029356699436903,
0.07286395132541656,
-0.0515817366540432,
-0.0385173074901104,
0.04257344454526901,
0.01288505457341671,
-0.04635733738541603,
0.006975907366722822,
-0.23849186301231384,
-0.016224171966314316,
0.09271696954965591,
0.03935651853680611,
0.09839507937431335,
-0.16960866749286652,
-0.04092130437493324,
0.047842785716056824,
-0.007540654856711626,
-0.10124830901622772,
0.08537615835666656,
0.034017838537693024,
0.07338255643844604,
-0.004606396891176701,
-0.024891892448067665,
0.10197193920612335,
-0.010744930244982243,
-0.06604664027690887,
0.13251623511314392,
-0.1310635805130005,
-0.22701339423656464,
-0.15556824207305908,
-0.09059266746044159,
0.012829037383198738,
0.020029380917549133,
0.06088358536362648,
-0.0984833613038063,
-0.04047348350286484,
0.09340295940637589,
0.048800546675920486,
-0.11554478853940964,
0.06088588386774063,
0.03625568374991417,
0.06712671369314194,
-0.05531323328614235,
-0.09261354058980942,
-0.03399547562003136,
-0.03282833844423294,
-0.0592164546251297,
0.11400482058525085,
-0.08691544830799103,
0.08821390569210052,
0.12714698910713196,
0.03303316980600357,
0.06908903270959854,
0.037190500646829605,
0.14900065958499908,
-0.042503830045461655,
-0.05133412033319473,
0.22044725716114044,
-0.05265473201870918,
0.09977665543556213,
0.07976467162370682,
0.04158017784357071,
-0.045153532177209854,
-0.044910646975040436,
-0.09580627083778381,
-0.11251644790172577,
-0.23339508473873138,
-0.11632336676120758,
-0.11841809004545212,
-0.009050658904016018,
-0.01312223356217146,
0.04410753399133682,
0.039445068687200546,
0.08139973133802414,
0.027636824175715446,
-0.06938033550977707,
-0.06520840525627136,
0.04459462687373161,
0.1636964976787567,
-0.01982896961271763,
0.06501677632331848,
-0.10369744151830673,
-0.07047824561595917,
0.08818766474723816,
0.1071975901722908,
0.18486493825912476,
0.10196351259946823,
0.04591067135334015,
0.06266649812459946,
0.16440556943416595,
0.16713009774684906,
0.17864565551280975,
0.022444644942879677,
-0.04257665574550629,
0.012160129845142365,
-0.004034236539155245,
-0.06408117711544037,
0.033074941486120224,
0.10836131125688553,
-0.14248988032341003,
-0.03663000836968422,
-0.19721247255802155,
0.07823971658945084,
0.0892072543501854,
0.04842750355601311,
-0.16599872708320618,
0.005733931437134743,
0.07285651564598083,
-0.0035274308174848557,
-0.03903355821967125,
0.06266865879297256,
0.04927125200629234,
-0.1105148121714592,
0.07735903561115265,
-0.018776731565594673,
0.10476468503475189,
-0.02309798076748848,
0.06368087977170944,
-0.05830521881580353,
-0.1166868731379509,
0.07732642441987991,
0.07979556173086166,
-0.20966772735118866,
0.2791883647441864,
-0.013891634531319141,
-0.06865277141332626,
-0.06718925386667252,
-0.027612829580903053,
-0.02581564523279667,
0.17256613075733185,
0.07876265794038773,
0.05417891591787338,
-0.012564106844365597,
-0.11767007410526276,
0.01336336974054575,
0.03516869619488716,
0.12126684188842773,
0.016263192519545555,
-0.037808869034051895,
0.0009015029645524919,
-0.03137049078941345,
-0.024771707132458687,
0.05426473543047905,
0.03131110966205597,
-0.10276723653078079,
0.04921581596136093,
0.06303322315216064,
0.06185920163989067,
-0.02645063027739525,
-0.012230822816491127,
-0.09006264805793762,
0.14630304276943207,
-0.12268799543380737,
-0.05209750682115555,
-0.08763470500707626,
-0.17107807099819183,
0.12655727565288544,
-0.0834343358874321,
0.08076118677854538,
-0.07428360730409622,
-0.006555586121976376,
-0.08958844095468521,
-0.1809639185667038,
0.10604351758956909,
-0.13064587116241455,
-0.029752030968666077,
-0.032393425703048706,
0.19508127868175507,
-0.06743763387203217,
-0.008848597295582294,
-0.0014133988879621029,
0.003170338924974203,
-0.11443919688463211,
-0.10556800663471222,
0.034433647990226746,
0.08721771091222763,
0.005253299605101347,
0.047525517642498016,
0.03620229288935661,
0.025709807872772217,
0.04834788665175438,
0.00755248824134469,
0.2353367656469345,
0.17548684775829315,
-0.06377674639225006,
0.14873339235782623,
0.17302420735359192,
-0.01574062556028366,
-0.285009503364563,
-0.10883672535419464,
-0.13371849060058594,
-0.033931996673345566,
-0.017847992479801178,
-0.10845586657524109,
0.10945626348257065,
0.022901205345988274,
-0.039283327758312225,
0.12088476866483688,
-0.24612922966480255,
-0.0442446731030941,
0.11107932031154633,
0.028848791494965553,
0.47273901104927063,
-0.0826442763209343,
-0.09518272429704666,
-0.018864387646317482,
-0.3578135371208191,
0.11924706399440765,
-0.057230640202760696,
0.047379009425640106,
-0.033936817198991776,
0.09227076172828674,
0.02967250347137451,
-0.10269278287887573,
0.14281408488750458,
0.01876791939139366,
0.01127137616276741,
-0.07126493006944656,
-0.05525389313697815,
0.08548896759748459,
-0.0006743131089024246,
-0.026677142828702927,
0.06641891598701477,
0.04213419184088707,
-0.1183980330824852,
-0.003526297863572836,
-0.1253504604101181,
0.04923660680651665,
0.03304152190685272,
-0.025461293756961823,
0.01802455261349678,
-0.08804701268672943,
-0.005478319711983204,
0.017344944179058075,
0.23128946125507355,
0.0190504789352417,
0.07687529921531677,
0.13548193871974945,
0.04076102003455162,
-0.19495664536952972,
-0.05747789889574051,
-0.0733247697353363,
-0.06767270714044571,
0.10593090951442719,
-0.054676979780197144,
0.06691879779100418,
0.13010664284229279,
-0.016147466376423836,
-0.04715171828866005,
0.12972740828990936,
-0.0104427644982934,
-0.036696918308734894,
0.13350361585617065,
-0.22019407153129578,
-0.040550775825977325,
-0.017647039145231247,
0.02422356978058815,
0.13563129305839539,
0.14111296832561493,
0.13283899426460266,
0.058004871010780334,
-0.01267054583877325,
-0.029117165133357048,
-0.008320917375385761,
-0.12583395838737488,
0.023886704817414284,
0.060772985219955444,
0.05346056818962097,
-0.13121677935123444,
0.06892767548561096,
-0.03396102786064148,
-0.2190098762512207,
-0.05701538920402527,
0.08250365406274796,
-0.13768930733203888,
-0.09656660258769989,
-0.03476579114794731,
0.03317911550402641,
-0.16277584433555603,
-0.1058233380317688,
-0.0011678390437737107,
-0.11089672148227692,
0.06016036868095398,
0.19821591675281525,
0.08584316074848175,
0.12878917157649994,
-0.004668691195547581,
-0.03300845995545387,
0.01784323900938034,
-0.09368711709976196,
-0.04300886020064354,
0.02095545083284378,
-0.12486502528190613,
0.038176409900188446,
-0.013503308407962322,
0.1630987972021103,
-0.07313559204339981,
-0.051649946719408035,
-0.12331976741552353,
0.09006806463003159,
-0.13275665044784546,
-0.0854976549744606,
-0.1250491738319397,
-0.06581363081932068,
0.013615244999527931,
-0.09591872990131378,
-0.034739308059215546,
0.011225644499063492,
-0.12756182253360748,
0.043969616293907166,
0.0009053647518157959,
-0.02014702372252941,
-0.07956037670373917,
-0.017965039238333702,
0.13025692105293274,
-0.05154487118124962,
0.08216773718595505,
0.17792077362537384,
-0.08201625198125839,
0.1104230284690857,
-0.1436932235956192,
-0.16703467071056366,
0.10313183069229126,
0.024749020114541054,
0.07502827793359756,
0.06481365859508514,
0.018033171072602272,
0.07953952997922897,
0.011842158623039722,
0.02604297734797001,
0.08792343735694885,
-0.09257270395755768,
-0.006632095202803612,
-0.015475695952773094,
-0.14394335448741913,
-0.04419282078742981,
-0.09318436682224274,
0.1546846330165863,
0.033206190913915634,
0.06882304698228836,
0.04272148013114929,
0.12172862142324448,
-0.02028835192322731,
0.01617366634309292,
-0.006689881905913353,
-0.15021154284477234,
0.014644641429185867,
-0.03612583503127098,
-0.0021800745744258165,
-0.03575151786208153,
0.2570479214191437,
-0.1169935017824173,
0.012041608802974224,
0.01839110627770424,
0.022250182926654816,
-0.06228094547986984,
0.014537185430526733,
0.2532733380794525,
0.10427812486886978,
-0.01946539245545864,
-0.1274455040693283,
0.056511733680963516,
-0.019331151619553566,
-0.05403968691825867,
0.12092624604701996,
0.1124909445643425,
0.017289919778704643,
0.11789786070585251,
0.036434538662433624,
0.011680149473249912,
-0.13912732899188995,
-0.26045718789100647,
-0.04385225102305412,
0.029577074572443962,
-0.0001971813035197556,
0.058319419622421265,
0.1449197679758072,
-0.016475338488817215,
0.03671139106154442,
0.011999168433248997,
-0.008807698264718056,
-0.15147577226161957,
-0.06979291141033173,
-0.05161357671022415,
-0.14267928898334503,
0.01620505377650261,
-0.04583301022648811,
0.03940702974796295,
0.14390712976455688,
0.043750230222940445,
-0.023306414484977722,
0.10670891404151917,
0.10184470564126968,
-0.0461588054895401,
-0.00009338666131952778,
0.005663316231220961,
0.03760925307869911,
0.06582412123680115,
-0.030398687347769737,
-0.10730119794607162,
-0.06887103617191315,
-0.05416353419423103,
0.048443060368299484,
-0.0858021229505539,
0.018585942685604095,
-0.07846789062023163,
-0.1045307144522667,
-0.07198254019021988,
0.050066154450178146,
-0.06974399834871292,
0.13762202858924866,
-0.003172858851030469,
0.037631019949913025,
0.015124509111046791,
0.13010895252227783,
-0.0900275781750679,
-0.08025754243135452,
0.012630991637706757,
0.1375042349100113,
0.05914553999900818,
0.10857908427715302,
0.013716067187488079,
0.02013135328888893,
-0.12275931984186172,
0.3350679278373718,
0.2155296951532364,
-0.032326579093933105,
0.06582900881767273,
0.0579368881881237,
0.049310024827718735,
0.0958622395992279,
0.10804342478513718,
0.09512009471654892,
0.3223268389701843,
-0.09742537140846252,
-0.052733100950717926,
-0.04021194577217102,
-0.004916955251246691,
-0.10172080993652344,
0.03782305493950844,
-0.03438255190849304,
-0.10795999318361282,
-0.06923660635948181,
0.09309882670640945,
-0.16782429814338684,
0.12473972141742706,
0.07427876442670822,
-0.21031813323497772,
-0.011814062483608723,
-0.0347696952521801,
0.21958883106708527,
0.011305928230285645,
0.1111871525645256,
-0.0572868175804615,
-0.12331073731184006,
0.12511004507541656,
0.06147737801074982,
-0.26694005727767944,
-0.048798006027936935,
0.08402017503976822,
-0.027201419696211815,
-0.04711960256099701,
-0.009916482493281364,
0.07510706037282944,
0.056327104568481445,
0.08745048195123672,
-0.0662127360701561,
0.04744267463684082,
0.018712041899561882,
-0.12005000561475754,
-0.12446659058332443,
0.003750364063307643,
-0.01677951030433178,
-0.12423498183488846,
0.011587136425077915,
-0.19719207286834717,
0.023814454674720764,
0.08385258167982101,
0.02231554687023163,
0.011619635857641697,
-0.04419941082596779,
-0.025614161044359207,
0.02065540850162506,
0.014288868755102158,
-0.01516236923635006,
-0.032940823584795,
-0.036324892193078995,
-0.006021621637046337,
0.03554140776395798,
-0.10285046696662903,
-0.13045722246170044,
-0.05361767113208771,
-0.04653622582554817,
0.09644684940576553,
0.022592172026634216,
0.010417021811008453,
-0.019849611446261406,
-0.06606122106313705,
0.03475489094853401,
-0.11004940420389175,
0.013839082792401314,
0.056078873574733734,
0.028882477432489395,
0.02919583208858967,
-0.012709894217550755,
0.05175841227173805,
0.05956165865063667,
-0.10579817742109299,
-0.12618069350719452
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-base-timit-demo-colab
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the None dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
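For orientation, a minimal sketch of how these values might be expressed with the `transformers` `TrainingArguments` API (the `output_dir` and the per-device interpretation of the batch sizes are assumptions, since the card does not state them):
```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-base-timit-demo-colab",  # assumed output directory
    learning_rate=1e-4,
    per_device_train_batch_size=32,   # card reports train_batch_size: 32
    per_device_eval_batch_size=8,     # card reports eval_batch_size: 8
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=1000,
    num_train_epochs=30,
    fp16=True,                        # "Native AMP" mixed-precision training
)
```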
### Training results
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-base-timit-demo-colab", "results": []}]}
|
automatic-speech-recognition
|
ahazeemi/wav2vec2-base-timit-demo-colab
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
# wav2vec2-base-timit-demo-colab
This model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
### Training results
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
[
"# wav2vec2-base-timit-demo-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"# wav2vec2-base-timit-demo-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
56,
39,
6,
12,
8,
3,
117,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n# wav2vec2-base-timit-demo-colab\n\nThis model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP### Training results### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
-0.09372417628765106,
0.10234329104423523,
-0.002581487176939845,
0.04791366308927536,
0.11391223967075348,
0.0076308343559503555,
0.10248864442110062,
0.10984732210636139,
-0.061121560633182526,
0.07888339459896088,
0.08395478874444962,
0.032434262335300446,
0.07175695896148682,
0.16986656188964844,
-0.031120600178837776,
-0.21932756900787354,
0.02506607584655285,
-0.034708909690380096,
-0.07112698256969452,
0.09175078570842743,
0.07741697132587433,
-0.10179688781499863,
0.06932766735553741,
0.01680986024439335,
-0.15235628187656403,
0.020389406010508537,
-0.03465995565056801,
-0.07865212857723236,
0.10690807551145554,
0.018166029825806618,
0.0593135766685009,
0.03227915242314339,
0.1359288990497589,
-0.23349642753601074,
0.002926630200818181,
0.09380435198545456,
0.025516333058476448,
0.07126753777265549,
0.0647822767496109,
0.003957673441618681,
0.1206374391913414,
-0.1572330892086029,
0.10276351124048233,
0.04506142437458038,
-0.08666706085205078,
-0.18708963692188263,
-0.07954846322536469,
0.07677389681339264,
0.09781576693058014,
0.09595860540866852,
-0.008948706090450287,
0.09295886754989624,
-0.08385195583105087,
0.06980044394731522,
0.20506493747234344,
-0.2394924908876419,
-0.06427508592605591,
0.004021770320832729,
0.05146244540810585,
0.024911589920520782,
-0.10330867767333984,
0.018662510439753532,
0.03103337436914444,
0.041048403829336166,
0.09417149424552917,
-0.012656232342123985,
-0.060726966708898544,
-0.01841304637491703,
-0.1229928731918335,
-0.0007522738305851817,
0.08687368035316467,
0.06610233336687088,
-0.04572315514087677,
-0.11093653738498688,
-0.0489499606192112,
-0.05989925190806389,
-0.03219097852706909,
-0.06161799654364586,
0.029733674600720406,
-0.03444592282176018,
-0.07823297381401062,
-0.0397634319961071,
-0.05573275312781334,
-0.05985423922538757,
0.009482095949351788,
0.10492374002933502,
0.01895664818584919,
0.020519020035862923,
-0.042734093964099884,
0.07723204046487808,
0.0352497361600399,
-0.11249946802854538,
0.01196763850748539,
-0.010120026767253876,
-0.11305660009384155,
-0.032986629754304886,
-0.038381531834602356,
0.010651377029716969,
0.024343445897102356,
0.12486044317483902,
-0.04520667344331741,
0.11074282228946686,
-0.007183377165347338,
-0.008080722764134407,
-0.016345476731657982,
0.11504261940717697,
-0.04504144564270973,
-0.07527056336402893,
-0.020427126437425613,
0.07202932983636856,
-0.005350797437131405,
-0.025412550196051598,
-0.0574769601225853,
-0.007262813858687878,
0.06386655569076538,
0.04929468408226967,
-0.04495358467102051,
0.01180102489888668,
-0.04708658158779144,
-0.02017177827656269,
-0.0021958332508802414,
-0.1132035031914711,
0.050865478813648224,
0.023466788232326508,
-0.05425140634179115,
0.030262090265750885,
0.013083389028906822,
0.01197703368961811,
-0.0369773805141449,
0.08768586814403534,
-0.05167703703045845,
-0.016910525038838387,
-0.051480043679475784,
-0.054502975195646286,
0.019722793251276016,
-0.08027619123458862,
-0.021322987973690033,
-0.07414688915014267,
-0.1490982323884964,
-0.04584137350320816,
0.0653081014752388,
-0.06247971951961517,
-0.019923856481909752,
-0.042849790304899216,
-0.04393693432211876,
0.039653170853853226,
-0.02549387700855732,
0.16758066415786743,
-0.05815302953124046,
0.043135225772857666,
-0.03320975974202156,
0.058117035776376724,
0.028514740988612175,
0.04331935942173004,
-0.03894777223467827,
0.027431022375822067,
-0.12129592895507812,
0.08855752646923065,
-0.09511353075504303,
0.011426257900893688,
-0.13391433656215668,
-0.08245799690485,
0.0029165991581976414,
-0.00603038165718317,
0.07855429500341415,
0.11762617528438568,
-0.23335330188274384,
-0.05361294373869896,
0.12967434525489807,
-0.08265263587236404,
-0.049054503440856934,
0.09241625666618347,
-0.034198928624391556,
0.011997123248875141,
0.05828015133738518,
0.1813267320394516,
0.05734557658433914,
-0.1545058786869049,
0.0001269052445422858,
-0.007035333663225174,
0.04262600094079971,
0.028161950409412384,
0.026248738169670105,
-0.03104887157678604,
0.032332997769117355,
0.000407714192988351,
-0.013014108873903751,
-0.009733946062624454,
-0.07640810310840607,
-0.07488133013248444,
-0.03690563514828682,
-0.08400506526231766,
0.010677127167582512,
-0.0049018049612641335,
0.003032681765034795,
-0.07614285498857498,
-0.09893997013568878,
0.06161873787641525,
0.10550662875175476,
-0.06638748198747635,
0.03170826658606529,
-0.07393622398376465,
0.00282221264205873,
0.037411320954561234,
-0.012334419414401054,
-0.1785574108362198,
-0.05221819877624512,
0.0245197843760252,
-0.09863810986280441,
0.028826655820012093,
0.026921281591057777,
0.052749115973711014,
0.04268805310130119,
-0.03594408556818962,
-0.014203157275915146,
-0.09769327938556671,
0.023811688646674156,
-0.07329252362251282,
-0.2143383026123047,
-0.04885398969054222,
-0.035107482224702835,
0.1298670768737793,
-0.19400757551193237,
-0.010164492763578892,
0.0358627587556839,
0.14705711603164673,
0.03360311686992645,
-0.05414099246263504,
-0.01044378150254488,
0.052205223590135574,
0.03201530873775482,
-0.09548720717430115,
0.04549461975693703,
0.0032096935901790857,
-0.07923763990402222,
-0.03717849776148796,
-0.12498237937688828,
0.019053326919674873,
0.09561588615179062,
0.05806223303079605,
-0.08855533599853516,
-0.008349006064236164,
-0.06821189820766449,
-0.03499875217676163,
-0.07257566601037979,
0.013217956759035587,
0.22059224545955658,
0.033058930188417435,
0.09159553050994873,
-0.06149178743362427,
-0.0638308972120285,
0.029270442202687263,
0.014734013937413692,
-0.02192307449877262,
0.08802769333124161,
0.09284177422523499,
-0.0921378955245018,
0.05611322447657585,
0.11332982033491135,
0.0026056289207190275,
0.13940778374671936,
-0.036874689161777496,
-0.06801096349954605,
-0.022396309301257133,
-0.029965350404381752,
-0.020513838157057762,
0.12894059717655182,
-0.10647492855787277,
0.005166169255971909,
0.015837902203202248,
0.035489771515131,
0.03998461365699768,
-0.15654654800891876,
0.017062125727534294,
0.008465622551739216,
-0.05925446003675461,
-0.025170400738716125,
-0.0044571273028850555,
0.0244598351418972,
0.08202569931745529,
0.029795724898576736,
-0.01568327099084854,
0.005342781078070402,
-0.020969780161976814,
-0.09140950441360474,
0.16066360473632812,
-0.1314879208803177,
-0.18411485850811005,
-0.06836460530757904,
0.023394178599119186,
-0.04811205342411995,
-0.03745168820023537,
0.02753608115017414,
-0.1193803995847702,
-0.06638316810131073,
-0.08177691698074341,
0.0196192879229784,
-0.02524801529943943,
0.02209313027560711,
0.10324067622423172,
0.014880290254950523,
0.07784267514944077,
-0.11765214055776596,
-0.007134580984711647,
-0.043135691434144974,
-0.03628262132406235,
-0.011536169797182083,
0.08038406074047089,
0.060621220618486404,
0.09760072827339172,
0.008818532340228558,
0.033391255885362625,
-0.028789615258574486,
0.2431279867887497,
-0.07582444697618484,
0.037459395825862885,
0.13031619787216187,
-0.0032817202154546976,
0.04067518189549446,
0.10429277271032333,
0.03204474598169327,
-0.1250687688589096,
0.03499627858400345,
0.07502542436122894,
-0.01666150987148285,
-0.22245509922504425,
-0.053420789539813995,
-0.029490886256098747,
-0.06452792137861252,
0.10351229459047318,
0.03374094516038895,
-0.051333267241716385,
0.016434114426374435,
0.020207658410072327,
0.007877549156546593,
0.0035634529776871204,
0.051234953105449677,
0.08921042084693909,
0.04175221174955368,
0.09154051542282104,
-0.022524002939462662,
-0.002904947381466627,
0.08061228692531586,
-0.02258162945508957,
0.23964181542396545,
0.017805038020014763,
0.03269489109516144,
0.06241856515407562,
0.11424101889133453,
-0.00786362774670124,
0.04218650609254837,
0.02294817380607128,
-0.01951725408434868,
-0.003684364026412368,
-0.05977718159556389,
-0.0198834091424942,
0.025373300537467003,
0.0159667506814003,
-0.010247744619846344,
-0.09545537084341049,
0.038783855736255646,
0.025359129533171654,
0.29895222187042236,
0.04829881712794304,
-0.2456631064414978,
-0.05652238056063652,
-0.007170958444476128,
-0.06584177911281586,
-0.06147930398583412,
0.024786805734038353,
0.12012315541505814,
-0.12712393701076508,
0.10569003969430923,
-0.045097772032022476,
0.0858788788318634,
-0.030698701739311218,
0.02613341249525547,
0.05129418894648552,
0.12633004784584045,
-0.004261984024196863,
0.043246060609817505,
-0.22500336170196533,
0.22693653404712677,
0.01741265505552292,
0.12185961753129959,
-0.0717887431383133,
0.029859758913517,
0.012497453950345516,
0.0044652000069618225,
0.10224538296461105,
0.004126423969864845,
-0.1042787954211235,
-0.11048168689012527,
-0.07517525553703308,
0.05119035765528679,
0.13351817429065704,
-0.02240203507244587,
0.06223849952220917,
-0.023981744423508644,
0.001924030715599656,
0.04091547057032585,
-0.021917099133133888,
-0.2102496176958084,
-0.12706707417964935,
0.0012119245948269963,
0.039028361439704895,
-0.006982014514505863,
-0.07342571765184402,
-0.09032530337572098,
-0.045881256461143494,
0.1346598118543625,
-0.0012232324806973338,
-0.018288439139723778,
-0.15483373403549194,
0.049734026193618774,
0.1353924423456192,
-0.041379764676094055,
0.03354113921523094,
0.032149773091077805,
0.12677305936813354,
0.014109555631875992,
-0.09820560365915298,
0.0696612298488617,
-0.09868481755256653,
-0.19054166972637177,
-0.047515641897916794,
0.13448794186115265,
0.09574054181575775,
0.030699025839567184,
0.01654987782239914,
0.024735746905207634,
0.012461812235414982,
-0.10166327655315399,
0.06647664308547974,
0.09177155047655106,
0.03856779262423515,
0.018800826743245125,
-0.042491015046834946,
-0.019726628437638283,
-0.03150758519768715,
-0.027332566678524017,
0.11227826774120331,
0.23093880712985992,
-0.07755092531442642,
0.13097402453422546,
0.12339884042739868,
-0.06838521361351013,
-0.16606353223323822,
0.07095310091972351,
0.11351672559976578,
0.021992366760969162,
0.03613268956542015,
-0.20139019191265106,
0.12068181484937668,
0.09588276594877243,
-0.005956016946583986,
0.016133226454257965,
-0.2737990617752075,
-0.14086169004440308,
0.09112846851348877,
0.08504518121480942,
0.036822956055402756,
-0.07566916942596436,
-0.02156558260321617,
-0.0654563158750534,
-0.0982176810503006,
0.14527441561222076,
-0.13280119001865387,
0.0972609594464302,
0.0114942891523242,
0.07463083416223526,
0.0018234276212751865,
-0.026732146739959717,
0.12591177225112915,
0.04113941267132759,
0.07924047857522964,
-0.032739073038101196,
0.07245505601167679,
0.012945666909217834,
-0.05242753401398659,
0.020656753331422806,
-0.05411161482334137,
0.054699838161468506,
-0.09508112818002701,
-0.02373923547565937,
-0.08529037982225418,
0.06922295689582825,
-0.05470060184597969,
-0.055187590420246124,
-0.033094506710767746,
0.051193829625844955,
0.07203084975481033,
-0.04191393777728081,
-0.026093458756804466,
-0.01284984964877367,
0.08063969761133194,
0.10259252041578293,
0.10942360758781433,
-0.058948881924152374,
-0.05746295675635338,
0.007251186762005091,
-0.03277130797505379,
0.05502647906541824,
-0.0674433708190918,
0.029081406071782112,
0.11262242496013641,
0.027810122817754745,
0.12529394030570984,
0.03550386056303978,
-0.07508376985788345,
-0.010555071756243706,
0.03310644254088402,
-0.11956698447465897,
-0.11767536401748657,
0.015783194452524185,
-0.021159034222364426,
-0.08722449094057083,
0.011073729023337364,
0.13703353703022003,
-0.03877997770905495,
-0.003124414710327983,
-0.01335113774985075,
0.02877396158874035,
-0.03904997184872627,
0.18014760315418243,
0.01994926854968071,
0.06697924435138702,
-0.10342983156442642,
0.1359579712152481,
0.030985839664936066,
-0.10426783561706543,
0.09253183752298355,
0.07915832102298737,
-0.09338586777448654,
-0.013059516437351704,
0.06461676210165024,
0.17011795938014984,
0.011650685220956802,
-0.05825217440724373,
-0.06451616436243057,
-0.13963164389133453,
0.07913393527269363,
0.15539032220840454,
0.017904095351696014,
-0.021329116076231003,
-0.053989116102457047,
0.022678524255752563,
-0.10999787598848343,
0.05381781980395317,
0.048734597861766815,
0.02726086787879467,
-0.11465459316968918,
0.10694816708564758,
0.022303400561213493,
0.015652276575565338,
-0.018180392682552338,
0.01635599136352539,
-0.11475029587745667,
-0.009849424473941326,
-0.13897624611854553,
-0.024353614076972008,
-0.03581714257597923,
0.009208953008055687,
-0.01324309129267931,
-0.04102622717618942,
-0.052826374769210815,
0.031416431069374084,
-0.0733073279261589,
-0.047600340098142624,
0.006655659526586533,
0.04199123755097389,
-0.14105960726737976,
0.012876486405730247,
0.018212363123893738,
-0.09731800854206085,
0.08397163450717926,
0.04689601808786392,
-0.0006531726685352623,
0.036197494715452194,
-0.12423504889011383,
-0.033218029886484146,
0.027140846475958824,
0.01812315359711647,
0.0693352073431015,
-0.12103603035211563,
-0.022427385672926903,
-0.017882568761706352,
0.045688800513744354,
0.023865077644586563,
0.09572675079107285,
-0.08836017549037933,
-0.01567026600241661,
-0.06461289525032043,
-0.060380686074495316,
-0.048795171082019806,
0.03383024036884308,
0.1252221167087555,
0.044788528233766556,
0.15126842260360718,
-0.0920586958527565,
0.04523687809705734,
-0.17871537804603577,
-0.03235427290201187,
-0.00417034886777401,
-0.008779839612543583,
-0.020713085308670998,
-0.0371626578271389,
0.1050410345196724,
-0.05252527818083763,
0.1321924924850464,
-0.018515165895223618,
0.07384705543518066,
0.042278390377759933,
-0.11463005095720291,
-0.11410976946353912,
0.02323053404688835,
0.16225536167621613,
0.055326055735349655,
0.000041045590478461236,
0.07831252366304398,
-0.02082769386470318,
0.0608612559735775,
0.0736425369977951,
0.22982411086559296,
0.17613191902637482,
0.010159286670386791,
0.07111494988203049,
0.07576582580804825,
-0.1266716867685318,
-0.1164482831954956,
0.14908801019191742,
-0.0872097909450531,
0.13297970592975616,
-0.06752055883407593,
0.13920989632606506,
0.050327811390161514,
-0.18167540431022644,
0.04602142795920372,
-0.04181884601712227,
-0.09437616169452667,
-0.12452282756567001,
-0.03738487511873245,
-0.0697978064417839,
-0.13805536925792694,
0.026770079508423805,
-0.10418504476547241,
0.04949958994984627,
0.039428941905498505,
0.029416458681225777,
0.027254294604063034,
0.12609337270259857,
-0.04540633782744408,
-0.011983479373157024,
0.1016593873500824,
0.02889084629714489,
-0.03771878778934479,
-0.05696457624435425,
-0.0743909403681755,
0.02681836299598217,
0.033621493726968765,
0.058395978063344955,
-0.0301838181912899,
-0.058280762284994125,
0.041810400784015656,
0.002657354110851884,
-0.0606052540242672,
0.03518322482705116,
-0.01722654514014721,
0.03911367431282997,
0.05633269622921944,
0.06459355354309082,
-0.02174994722008705,
-0.03033214434981346,
0.23759780824184418,
-0.0861881896853447,
-0.08798935264348984,
-0.14441722631454468,
0.15732231736183167,
0.00862217228859663,
0.002134969225153327,
0.038173869252204895,
-0.08040457218885422,
-0.048320379108190536,
0.18219751119613647,
0.145535409450531,
-0.08128604292869568,
-0.005914174951612949,
-0.024779802188277245,
-0.01193852536380291,
-0.04891972243785858,
0.15232057869434357,
0.11166482418775558,
0.03696750849485397,
-0.04177583381533623,
-0.013171426951885223,
-0.02231893502175808,
-0.07412689179182053,
-0.06802365183830261,
0.09938459098339081,
0.014033057726919651,
-0.013976527377963066,
-0.029567934572696686,
0.09715554863214493,
-0.019119368866086006,
-0.21143914759159088,
0.024249427020549774,
-0.16057325899600983,
-0.17315427958965302,
-0.011705251410603523,
0.05713680386543274,
-0.0016112581361085176,
0.04705798253417015,
0.0011250866809859872,
-0.0020188584458082914,
0.1372583955526352,
-0.006777009926736355,
-0.0162190068513155,
-0.11050543934106827,
0.07975690066814423,
-0.08622404932975769,
0.17569923400878906,
-0.008325489237904549,
0.04024094343185425,
0.094985730946064,
0.05627938359975815,
-0.11663048714399338,
0.03923811390995979,
0.049564242362976074,
-0.07956723868846893,
0.013952819630503654,
0.1453864723443985,
-0.051312368363142014,
0.08450287580490112,
0.05760576203465462,
-0.12679553031921387,
-0.012966668233275414,
-0.043279822915792465,
-0.020504677668213844,
-0.056225746870040894,
0.008479291573166847,
-0.07220370322465897,
0.15185874700546265,
0.17428453266620636,
-0.04060377925634384,
0.015837768092751503,
-0.06893790513277054,
0.05047145485877991,
0.009217094630002975,
0.06023559346795082,
-0.03847077116370201,
-0.22984610497951508,
0.026762111112475395,
0.015379494987428188,
0.01630079373717308,
-0.18785083293914795,
-0.09838417172431946,
0.03187374770641327,
-0.041696906089782715,
-0.055124856531620026,
0.1088261604309082,
0.02262573130428791,
0.024444568902254105,
-0.034037940204143524,
-0.09148868918418884,
-0.015765920281410217,
0.15918397903442383,
-0.15084122121334076,
-0.028586694970726967
] |
null | null |
speechbrain
|
<iframe src="https://ghbtns.com/github-btn.html?user=speechbrain&repo=speechbrain&type=star&count=true&size=large&v=2" frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>
<br/><br/>
# Speaker Verification with ECAPA-TDNN embeddings on Voxceleb
This repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain.
The system can be used to extract speaker embeddings as well.
It is trained on VoxCeleb1 + VoxCeleb2 training data.
For a better experience, we encourage you to learn more about
[SpeechBrain](https://speechbrain.github.io). The model performance on the Voxceleb1-test set (Cleaned) is:
| Release | EER(%) | minDCF |
|:-------------:|:--------------:|:--------------:|
| 05-03-21 | 0.69 | 0.08258 |
## Pipeline description
This system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.
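As a minimal illustration of the scoring step (plain PyTorch, not the exact SpeechBrain implementation), a cosine score between two speaker embeddings can be turned into a verification decision as follows; the embedding dimension and the decision threshold are illustrative assumptions.
```python
import torch
import torch.nn.functional as F

# Illustrative 192-dimensional embeddings for two utterances (random here)
emb_a = torch.randn(1, 192)
emb_b = torch.randn(1, 192)

# Cosine similarity is high when both utterances come from the same speaker
score = F.cosine_similarity(emb_a, emb_b, dim=-1).item()

# The decision threshold would normally be tuned on a development set
same_speaker = score > 0.25
print(score, same_speaker)
```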
## Install SpeechBrain
First of all, please install SpeechBrain with the following command:
```
gh repo clone aheba/speechbrain-aheba-contribs
git checkout pretrain_new
pip install -r requirements.txt
pip install --editable .
```
Please notice that we encourage you to read our tutorials and learn more about
[SpeechBrain](https://speechbrain.github.io).
### Compute your speaker embeddings
```python
import torchaudio
from speechbrain.pretrained import Predictor

# Load the pretrained predictor from the Hub
classifier = Predictor.import_model(source="aheba31/test-predictor")

# Read a waveform and extract its speaker embedding
signal, fs = torchaudio.load('samples/audio_samples/example1.wav')
embeddings = classifier.encode_batch(signal)
```
### Perform Speaker Verification
```python
from speechbrain.pretrained import SpeakerRecognition
verification = SpeakerRecognition.from_hparams(source="aheba31/test-predictor", savedir="aheba31/test-predictor")
score, prediction = verification.verify_files("speechbrain/spkrec-ecapa-voxceleb/example1.wav", "speechbrain/spkrec-ecapa-voxceleb/example2.flac")
```
The prediction is 1 if the two signals in input are from the same speaker and 0 otherwise.
### Inference on GPU
To perform inference on the GPU, add `run_opts={"device":"cuda"}` when calling the `from_hparams` method.
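For example, the loading call shown above could become (a small sketch of the same `from_hparams` call with the device option added):
```python
from speechbrain.pretrained import SpeakerRecognition

verification = SpeakerRecognition.from_hparams(
    source="aheba31/test-predictor",
    savedir="aheba31/test-predictor",
    run_opts={"device": "cuda"},  # run the model and inference on GPU
)
```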
### Training
The model was trained with SpeechBrain (aa018540).
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
```bash
git clone https://github.com/speechbrain/speechbrain/
```
2. Install it:
```
cd speechbrain
pip install -r requirements.txt
pip install -e .
```
3. Run Training:
```
cd recipes/VoxCeleb/SpeakerRec
python train_speaker_embeddings.py hparams/train_ecapa_tdnn.yaml --data_folder=your_data_folder
```
You can find our training results (models, logs, etc) [here](https://drive.google.com/drive/folders/1-ahC1xeyPinAHp2oAohL-02smNWO41Cc?usp=sharing).
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
#### Referencing ECAPA-TDNN
```
@inproceedings{DBLP:conf/interspeech/DesplanquesTD20,
author = {Brecht Desplanques and
Jenthe Thienpondt and
Kris Demuynck},
editor = {Helen Meng and
Bo Xu and
Thomas Fang Zheng},
title = {{ECAPA-TDNN:} Emphasized Channel Attention, Propagation and Aggregation
in {TDNN} Based Speaker Verification},
booktitle = {Interspeech 2020},
pages = {3830--3834},
publisher = {{ISCA}},
year = {2020},
}
```
# **Citing SpeechBrain**
Please, cite SpeechBrain if you use it for your research or business.
```bibtex
@misc{speechbrain,
title={{SpeechBrain}: A General-Purpose Speech Toolkit},
author={Mirco Ravanelli and Titouan Parcollet and Peter Plantinga and Aku Rouhe and Samuele Cornell and Loren Lugosch and Cem Subakan and Nauman Dawalatabad and Abdelwahab Heba and Jianyuan Zhong and Ju-Chieh Chou and Sung-Lin Yeh and Szu-Wei Fu and Chien-Feng Liao and Elena Rastorgueva and François Grondin and William Aris and Hwidong Na and Yan Gao and Renato De Mori and Yoshua Bengio},
year={2021},
eprint={2106.04624},
archivePrefix={arXiv},
primaryClass={eess.AS},
note={arXiv:2106.04624}
}
```
# **About SpeechBrain**
- Website: https://speechbrain.github.io/
- Code: https://github.com/speechbrain/speechbrain/
- HuggingFace: https://huggingface.co/speechbrain/
|
{"language": "en", "license": "apache-2.0", "tags": ["speechbrain", "embeddings", "Speaker", "Verification", "Identification", "pytorch", "ECAPA", "TDNN"], "datasets": ["voxceleb"], "metrics": ["EER"], "widget": [{"example_title": "VoxCeleb Speaker id10003", "src": "https://cdn-media.huggingface.co/speech_samples/VoxCeleb1_00003.wav"}, {"example_title": "VoxCeleb Speaker id10004", "src": "https://cdn-media.huggingface.co/speech_samples/VoxCeleb_00004.wav"}]}
| null |
aheba31/test-predictor
|
[
"speechbrain",
"embeddings",
"Speaker",
"Verification",
"Identification",
"pytorch",
"ECAPA",
"TDNN",
"en",
"dataset:voxceleb",
"arxiv:2106.04624",
"license:apache-2.0",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2106.04624"
] |
[
"en"
] |
TAGS
#speechbrain #embeddings #Speaker #Verification #Identification #pytorch #ECAPA #TDNN #en #dataset-voxceleb #arxiv-2106.04624 #license-apache-2.0 #region-us
|
Speaker Verification with ECAPA-TDNN embeddings on Voxceleb
===========================================================
This repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain.
The system can be used to extract speaker embeddings as well.
It is trained on Voxceleb 1+ Voxceleb2 training data.
For a better experience, we encourage you to learn more about
SpeechBrain. The model performance on Voxceleb1-test set(Cleaned) is:
Pipeline description
--------------------
This system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.
Install SpeechBrain
-------------------
First of all, please install SpeechBrain with the following command:
Please notice that we encourage you to read our tutorials and learn more about
SpeechBrain.
### Compute your speaker embeddings
### Perform Speaker Verification
The prediction is 1 if the two signals in input are from the same speaker and 0 otherwise.
### Inference on GPU
To perform inference on the GPU, add 'run\_opts={"device":"cuda"}' when calling the 'from\_hparams' method.
### Training
The model was trained with SpeechBrain (aa018540).
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
2. Install it:
3. Run Training:
You can find our training results (models, logs, etc) here.
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
#### Referencing ECAPA-TDNN
Citing SpeechBrain
==================
Please, cite SpeechBrain if you use it for your research or business.
About SpeechBrain
=================
* Website: URL
* Code: URL
* HuggingFace: URL
|
[
"### Compute your speaker embeddings",
"### Perform Speaker Verification\n\n\nThe prediction is 1 if the two signals in input are from the same speaker and 0 otherwise.",
"### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.",
"### Training\n\n\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n\n\n1. Clone SpeechBrain:\n2. Install it:\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.",
"### Limitations\n\n\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.",
"#### Referencing ECAPA-TDNN\n\n\nCiting SpeechBrain\n==================\n\n\nPlease, cite SpeechBrain if you use it for your research or business.\n\n\nAbout SpeechBrain\n=================\n\n\n* Website: URL\n* Code: URL\n* HuggingFace: URL"
] |
[
"TAGS\n#speechbrain #embeddings #Speaker #Verification #Identification #pytorch #ECAPA #TDNN #en #dataset-voxceleb #arxiv-2106.04624 #license-apache-2.0 #region-us \n",
"### Compute your speaker embeddings",
"### Perform Speaker Verification\n\n\nThe prediction is 1 if the two signals in input are from the same speaker and 0 otherwise.",
"### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.",
"### Training\n\n\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n\n\n1. Clone SpeechBrain:\n2. Install it:\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.",
"### Limitations\n\n\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.",
"#### Referencing ECAPA-TDNN\n\n\nCiting SpeechBrain\n==================\n\n\nPlease, cite SpeechBrain if you use it for your research or business.\n\n\nAbout SpeechBrain\n=================\n\n\n* Website: URL\n* Code: URL\n* HuggingFace: URL"
] |
[
62,
10,
27,
48,
61,
31,
55
] |
[
"passage: TAGS\n#speechbrain #embeddings #Speaker #Verification #Identification #pytorch #ECAPA #TDNN #en #dataset-voxceleb #arxiv-2106.04624 #license-apache-2.0 #region-us \n### Compute your speaker embeddings### Perform Speaker Verification\n\n\nThe prediction is 1 if the two signals in input are from the same speaker and 0 otherwise.### Inference on GPU\n\n\nTo perform inference on the GPU, add 'run\\_opts={\"device\":\"cuda\"}' when calling the 'from\\_hparams' method.### Training\n\n\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n\n\n1. Clone SpeechBrain:\n2. Install it:\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.### Limitations\n\n\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.#### Referencing ECAPA-TDNN\n\n\nCiting SpeechBrain\n==================\n\n\nPlease, cite SpeechBrain if you use it for your research or business.\n\n\nAbout SpeechBrain\n=================\n\n\n* Website: URL\n* Code: URL\n* HuggingFace: URL"
] |
[
-0.0901050865650177,
0.23678945004940033,
-0.0033758992794901133,
0.07440940290689468,
0.05189485475420952,
-0.001022808370180428,
0.13064809143543243,
0.10967093706130981,
0.05852459371089935,
0.12423175573348999,
0.06487762928009033,
0.034642063081264496,
0.0868513435125351,
0.11660456657409668,
0.07402313500642776,
-0.13330338895320892,
0.0389433316886425,
-0.051176849752664566,
0.14228439331054688,
0.07367176562547684,
0.04978903383016586,
-0.053184397518634796,
0.06514406204223633,
-0.0033576597925275564,
-0.11294496059417725,
0.002608334645628929,
0.05978129431605339,
-0.03509284183382988,
0.08939553052186966,
0.0571008138358593,
0.028997791931033134,
-0.03776068985462189,
0.06711012125015259,
-0.25121036171913147,
0.013068316504359245,
0.05615628883242607,
0.02481374703347683,
0.05463742837309837,
0.05111049488186836,
-0.02910083346068859,
0.15513142943382263,
0.1488223671913147,
0.04831841588020325,
0.1341095268726349,
-0.10286926478147507,
-0.04419008642435074,
-0.10979627817869186,
0.12423016130924225,
0.03818559646606445,
0.09904071688652039,
-0.05949944257736206,
0.1286548525094986,
-0.03724490851163864,
0.07236240804195404,
0.13667629659175873,
-0.22382840514183044,
0.008880230598151684,
-0.009086555801331997,
0.0652332603931427,
-0.013653907924890518,
-0.09500464797019958,
0.012625367380678654,
0.05939389392733574,
0.018642382696270943,
-0.009688897989690304,
-0.00639771344140172,
0.01569654420018196,
-0.02882426790893078,
-0.08749272674322128,
-0.10993985831737518,
0.1545172929763794,
0.07694468647241592,
-0.12050259113311768,
-0.09609783440828323,
-0.017954332754015923,
-0.0019031877163797617,
0.047824595123529434,
0.02685869112610817,
0.0415043942630291,
-0.0047878846526145935,
-0.00492085749283433,
-0.05799149349331856,
-0.07509192824363708,
-0.11408942192792892,
0.014738209545612335,
0.0918230190873146,
0.04312815144658089,
0.042220618575811386,
-0.009082605130970478,
0.10935816913843155,
-0.06850456446409225,
-0.059267349541187286,
-0.047889433801174164,
-0.023141229525208473,
-0.06573706865310669,
0.018686506897211075,
0.024181891232728958,
-0.011366227641701698,
0.03220048174262047,
0.19180668890476227,
-0.01337249856442213,
0.030276289209723473,
-0.03082701563835144,
0.0141760827973485,
-0.022579818964004517,
0.13576938211917877,
-0.07404769212007523,
-0.01904434710741043,
0.02389417216181755,
0.06275947391986847,
0.029055939987301826,
-0.01502961851656437,
-0.02414102852344513,
0.09893936663866043,
-0.02850463055074215,
0.04668576270341873,
0.052180878818035126,
-0.0023250572849065065,
-0.12221252173185349,
0.031589727848768234,
0.11443832516670227,
-0.12046884000301361,
0.024159224703907967,
0.06435318291187286,
-0.06650315970182419,
-0.108573779463768,
0.14008979499340057,
0.03022555634379387,
-0.10525877773761749,
-0.04203084111213684,
-0.08319701999425888,
-0.007508795242756605,
-0.047660186886787415,
-0.058734551072120667,
0.049554210156202316,
-0.002876602578908205,
-0.03764265403151512,
-0.11414281278848648,
-0.10339735448360443,
-0.06266023218631744,
0.019414426758885384,
-0.05433652922511101,
-0.02626938745379448,
-0.09732277691364288,
-0.03256071358919144,
0.0033386864233762026,
-0.031876128166913986,
-0.060047563165426254,
-0.03583136200904846,
0.04433733597397804,
0.028464382514357567,
0.04035178944468498,
0.015899987891316414,
0.025730684399604797,
-0.07033953815698624,
-0.006975868251174688,
-0.07575909048318863,
0.08773505687713623,
-0.04290148988366127,
0.034927401691675186,
-0.07233516126871109,
0.0021157122682780027,
-0.04506866633892059,
0.041844360530376434,
0.10074375569820404,
0.16590391099452972,
-0.11581803858280182,
-0.06235881894826889,
0.22431136667728424,
-0.05615680664777756,
-0.10466863960027695,
0.15053129196166992,
-0.029277954250574112,
0.12796097993850708,
0.09708794206380844,
0.23199427127838135,
-0.04993651434779167,
-0.216505229473114,
-0.10670482367277145,
-0.09714069962501526,
-0.03828864172101021,
0.029534509405493736,
0.0301398616284132,
-0.1732623279094696,
0.11054966598749161,
0.024715183302760124,
0.03855520486831665,
-0.019021177664399147,
-0.011374205350875854,
-0.06012116000056267,
-0.000651037145871669,
-0.06211558356881142,
-0.030756881460547447,
-0.02343890815973282,
-0.029229482635855675,
-0.04025229811668396,
-0.16039793193340302,
0.003213428659364581,
0.08195798844099045,
-0.08806519210338593,
0.06894669681787491,
-0.08098409324884415,
0.06696883589029312,
-0.05185375735163689,
0.007875527255237103,
-0.11326046288013458,
0.011997696943581104,
0.06496280431747437,
-0.14409507811069489,
0.13971182703971863,
-0.10190600156784058,
0.01784970425069332,
0.026040514931082726,
0.00471560750156641,
-0.02579692006111145,
-0.06288285553455353,
-0.003809808986261487,
-0.0523810014128685,
-0.122709259390831,
0.007073086220771074,
-0.024809805676341057,
0.13768334686756134,
-0.18499301373958588,
-0.005082341842353344,
0.00016327384219039232,
0.1336338222026825,
0.025310218334197998,
-0.11294449120759964,
0.03959566727280617,
0.002063443884253502,
-0.010988340713083744,
-0.033154912292957306,
0.005056518595665693,
0.024760186672210693,
-0.045235246419906616,
0.10890085995197296,
-0.19719058275222778,
-0.1373128741979599,
0.08887122571468353,
0.009155564941465855,
-0.052449263632297516,
0.10658075660467148,
-0.05530719459056854,
-0.049557220190763474,
-0.03847527131438255,
-0.12234264612197876,
0.19103719294071198,
0.022625688463449478,
0.12102853506803513,
-0.092717744410038,
-0.03910958021879196,
0.012798813171684742,
-0.07799002528190613,
-0.002530617406591773,
0.07921265065670013,
0.026419438421726227,
0.0809112936258316,
0.037193506956100464,
-0.059501077979803085,
-0.04320148006081581,
0.16701045632362366,
0.022213855758309364,
-0.09763041883707047,
-0.07050110399723053,
0.021802419796586037,
0.017704950645565987,
0.16446924209594727,
-0.08153600245714188,
-0.019352054223418236,
0.03139872848987579,
0.0030605236534029245,
0.04314026981592178,
-0.1437121033668518,
0.05723470076918602,
-0.0001291459338972345,
-0.09044240415096283,
-0.12840792536735535,
-0.010901362635195255,
-0.0858849436044693,
0.06878965348005295,
-0.014308391138911247,
0.001253114314749837,
-0.01927572302520275,
-0.04515666514635086,
-0.14807723462581635,
0.11455987393856049,
-0.08559677004814148,
-0.17850784957408905,
-0.21929430961608887,
0.02978583052754402,
-0.058280982077121735,
0.03248022869229317,
0.055931054055690765,
-0.05979473888874054,
-0.08585229516029358,
-0.11315195262432098,
-0.02086760476231575,
0.030418701469898224,
-0.08739195019006729,
-0.07489475607872009,
0.011032873764634132,
0.06812766194343567,
-0.1368512660264969,
-0.017040226608514786,
-0.027566328644752502,
-0.000843257294036448,
-0.02137017995119095,
0.030638152733445168,
0.06369804590940475,
0.129318967461586,
0.06753146648406982,
0.011945896781980991,
-0.040605578571558,
0.12234031409025192,
-0.10559132695198059,
0.022221144288778305,
0.061446305364370346,
0.0039944578893482685,
0.042415034025907516,
0.15186403691768646,
0.023106591776013374,
-0.08499301224946976,
0.042544905096292496,
0.028166862204670906,
-0.03767256438732147,
-0.26450830698013306,
-0.028527159243822098,
-0.050630245357751846,
0.02934260293841362,
0.04025080427527428,
0.048738911747932434,
0.033173296600580215,
-0.04937343671917915,
-0.005909026600420475,
-0.02755540795624256,
0.03383738175034523,
0.05991104617714882,
0.03345175087451935,
-0.01807606965303421,
0.04924483224749565,
-0.07999066263437271,
0.03140861541032791,
0.05884188786149025,
0.08186312764883041,
0.16945935785770416,
-0.0016661975532770157,
0.2475930005311966,
0.10883317142724991,
0.07737161964178085,
-0.0409492626786232,
0.06733245402574539,
0.039270494133234024,
0.0442609004676342,
0.030261393636465073,
-0.11045396327972412,
-0.06875718384981155,
0.09630632400512695,
0.09580358862876892,
0.008698084391653538,
-0.03471355140209198,
-0.052764855325222015,
0.01261006761342287,
0.2060910165309906,
0.13413865864276886,
-0.225356325507164,
-0.06914067268371582,
0.021567638963460922,
-0.054601579904556274,
-0.04928472265601158,
0.01985897682607174,
0.07377221435308456,
-0.11275503039360046,
-0.007871033623814583,
0.05352158471941948,
0.09361684322357178,
-0.03207550570368767,
-0.030505426228046417,
-0.06735049933195114,
0.06743355095386505,
-0.018936173990368843,
0.044363293796777725,
-0.19543562829494476,
0.1944282054901123,
0.005540261510759592,
0.14543697237968445,
0.018914464861154556,
0.05967910960316658,
0.023177260532975197,
-0.09336970746517181,
0.18004749715328217,
0.001693621394224465,
0.1003912016749382,
-0.10651423782110214,
-0.1283821016550064,
0.011167488060891628,
0.024874946102499962,
-0.0022093974985182285,
0.06955909729003906,
0.01206880621612072,
-0.016252119094133377,
0.003887427970767021,
-0.07707826793193817,
-0.2211863249540329,
-0.08789942413568497,
0.020241739228367805,
0.03694611415266991,
0.046346452087163925,
-0.03285928815603256,
-0.04861346259713173,
-0.03238038346171379,
0.09582316875457764,
-0.13649636507034302,
-0.057469893246889114,
-0.054385289549827576,
-0.0891939252614975,
0.09044311940670013,
-0.06280267983675003,
0.03379080072045326,
-0.033413853496313095,
0.05911470949649811,
-0.015000741928815842,
0.01474813837558031,
0.06799852848052979,
-0.06385329365730286,
-0.10545193403959274,
-0.06309016048908234,
0.1689864844083786,
0.05591452121734619,
0.06309207528829575,
0.00407590065151453,
0.05171586945652962,
-0.08078765869140625,
-0.08212345093488693,
0.02397972159087658,
0.12864074110984802,
-0.00910763069987297,
0.079953633248806,
-0.05028734728693962,
-0.06383770704269409,
-0.10961627215147018,
-0.051528628915548325,
0.06728698313236237,
0.28449198603630066,
-0.06646953523159027,
0.13872230052947998,
0.20170089602470398,
-0.13077501952648163,
-0.19941380620002747,
-0.10033311694860458,
0.028549151495099068,
0.05664825439453125,
-0.032634299248456955,
-0.059894710779190063,
0.031139593571424484,
-0.007371856365352869,
-0.04924385994672775,
-0.0031951467972248793,
-0.12229034304618835,
-0.16185705363750458,
0.09657260775566101,
0.033773891627788544,
-0.09442364424467087,
-0.08735980838537216,
-0.07064514607191086,
-0.05017323046922684,
-0.12025614082813263,
0.04837040230631828,
-0.06442596018314362,
0.09557238966226578,
0.04926421865820885,
0.06640248000621796,
0.0003578859905246645,
-0.06720850616693497,
0.07580477744340897,
0.04270055517554283,
-0.021218230947852135,
-0.026695189997553825,
0.04088872671127319,
0.08802574872970581,
-0.06942244619131088,
0.2085782140493393,
-0.0027826472651213408,
0.027606308460235596,
-0.14983047544956207,
-0.025046348571777344,
-0.08028166741132736,
0.05548996105790138,
-0.06919502466917038,
-0.01109575666487217,
-0.0027442295104265213,
-0.011900934390723705,
0.0434538759291172,
0.025526463985443115,
-0.0866834744811058,
-0.11726050078868866,
0.051534730941057205,
0.26112639904022217,
0.13000845909118652,
-0.05278374254703522,
-0.109107606112957,
0.014328718185424805,
-0.08869926631450653,
0.033816441893577576,
-0.16369082033634186,
0.017386458814144135,
0.08058594167232513,
0.024033166468143463,
0.15336520969867706,
-0.0040200259536504745,
-0.18074476718902588,
0.034879326820373535,
0.05775890126824379,
-0.0996406227350235,
-0.11194300651550293,
0.018390782177448273,
-0.0970689058303833,
-0.09997190535068512,
0.00756344199180603,
0.13634446263313293,
-0.08966036885976791,
0.0022542066872119904,
0.004674871917814016,
0.06386257708072662,
-0.10676275193691254,
0.15699897706508636,
0.08972734957933426,
0.05103399232029915,
-0.06765694171190262,
0.1241951733827591,
0.055210307240486145,
-0.05436556413769722,
0.08408478647470474,
0.01609550602734089,
-0.05282137170433998,
-0.035178687423467636,
-0.14659970998764038,
0.08328457921743393,
-0.016068561002612114,
-0.07901082187891006,
-0.1539650857448578,
-0.0059332335367798805,
0.0010798848234117031,
0.04600756987929344,
-0.0006650918512605131,
0.0874018445611,
-0.02591354213654995,
0.03192354366183281,
-0.08181352913379669,
0.10351936519145966,
0.08582165837287903,
-0.0015050711808726192,
-0.07049092650413513,
0.1208878830075264,
0.07420801371335983,
0.010529609397053719,
-0.004442911129444838,
-0.05171576142311096,
-0.10687829554080963,
0.054758816957473755,
-0.03231869637966156,
0.009361273609101772,
-0.01305233221501112,
-0.0074134450405836105,
0.003579159965738654,
-0.027458392083644867,
-0.0014094251673668623,
0.07083526998758316,
-0.0632084533572197,
-0.05450110509991646,
0.014117402955889702,
0.1137944832444191,
-0.18446595966815948,
0.02186831645667553,
0.04795306921005249,
-0.060987167060375214,
0.09684287011623383,
0.061545196920633316,
-0.06302978098392487,
0.02813144214451313,
-0.09223803132772446,
0.035488393157720566,
-0.025190217420458794,
-0.0060675800777971745,
-0.0018486016197130084,
-0.113042451441288,
-0.030712297186255455,
0.010444838553667068,
0.02282717265188694,
-0.02394705079495907,
0.06170632317662239,
-0.08098950982093811,
-0.0008821836090646684,
0.03616464510560036,
-0.03453278914093971,
-0.04034252092242241,
0.05730365589261055,
-0.05177467316389084,
0.07586277276277542,
0.0828104317188263,
-0.06518024951219559,
0.0052397595718503,
-0.15209373831748962,
0.005666630808264017,
0.01575273834168911,
0.02721290849149227,
-0.07361383736133575,
-0.009136300534009933,
0.07348532229661942,
-0.014591997489333153,
0.13088876008987427,
-0.034471094608306885,
0.0038304352201521397,
0.03630360588431358,
0.027348700910806656,
-0.03394352272152901,
0.013785396702587605,
0.03874325379729271,
-0.013023093342781067,
-0.009031844325363636,
-0.012796962633728981,
-0.04425669088959694,
0.005952537525445223,
-0.08661044389009476,
0.05888887122273445,
0.1182752177119255,
0.11355317384004593,
0.07880502939224243,
0.12524588406085968,
-0.017804434522986412,
-0.08031222224235535,
-0.043185118585824966,
-0.07984112203121185,
0.056546375155448914,
-0.08825630694627762,
0.16627037525177002,
0.13583476841449738,
-0.09124071896076202,
0.0621151439845562,
-0.0031006925273686647,
-0.10090399533510208,
-0.1050601527094841,
-0.11427214741706848,
-0.036990124732255936,
-0.04365917667746544,
0.020648637786507607,
-0.06898090988397598,
0.0504893884062767,
0.08966174721717834,
0.003802882507443428,
-0.025485748425126076,
0.1367044448852539,
-0.037118151783943176,
-0.056567173451185226,
0.022801248356699944,
-0.02965068817138672,
-0.021716473624110222,
0.006036132574081421,
0.030435023829340935,
0.035210173577070236,
0.06492195278406143,
0.10403987765312195,
0.06471969187259674,
-0.007633192930370569,
0.06372205913066864,
-0.011991575360298157,
-0.04601573199033737,
0.016862109303474426,
0.04218851774930954,
0.010426902212202549,
0.09027030318975449,
0.09436497837305069,
-0.02869628369808197,
0.021999968215823174,
0.13809321820735931,
-0.09693168848752975,
-0.05741097405552864,
-0.16446442902088165,
0.22561921179294586,
-0.019073104485869408,
0.013896817341446877,
-0.05141708627343178,
-0.1276705265045166,
-0.01243860274553299,
0.19775013625621796,
0.1599130928516388,
-0.03828717768192291,
-0.02763398550450802,
-0.006289368961006403,
0.008543680422008038,
-0.017604434862732887,
0.09791833907365799,
0.01714951917529106,
0.22129014134407043,
-0.0021355869248509407,
0.09622859209775925,
0.010057849809527397,
-0.05347210541367531,
-0.04618576169013977,
0.06310166418552399,
-0.09092532843351364,
-0.03201736882328987,
-0.06719596683979034,
0.051476653665304184,
-0.06588802486658096,
-0.189300999045372,
-0.10244812816381454,
-0.09392043948173523,
-0.06057017669081688,
0.014193447306752205,
0.025292260572314262,
0.019842635840177536,
0.04994058609008789,
0.0016334885731339455,
0.0006375855882652104,
0.2609764039516449,
-0.02824961021542549,
-0.0857243537902832,
-0.06726349145174026,
-0.017661510035395622,
-0.1494213193655014,
0.23634260892868042,
0.0022899508476257324,
0.11212678253650665,
0.07688693702220917,
0.0310846958309412,
-0.06880400329828262,
0.12286670506000519,
0.024515196681022644,
-0.11632579565048218,
-0.0019965143874287605,
0.140182226896286,
0.004521803930401802,
0.04265682399272919,
0.0010964858811348677,
0.013677822425961494,
0.05540589615702629,
-0.06723837554454803,
-0.015986625105142593,
-0.14134177565574646,
0.03904089331626892,
-0.08174574375152588,
0.11564446985721588,
0.09866280853748322,
-0.0777040645480156,
-0.007664526347070932,
-0.06190996617078781,
0.009828589856624603,
0.038182370364665985,
0.020176801830530167,
0.017840541899204254,
-0.21346630156040192,
0.09457509964704514,
-0.05852163955569267,
-0.00860991608351469,
-0.3548681437969208,
-0.04027760401368141,
-0.013558443635702133,
-0.014914893545210361,
0.0026096426881849766,
0.10726673156023026,
0.05442793667316437,
0.05165537819266319,
-0.04905314743518829,
-0.10405955463647842,
0.010164335370063782,
0.14041630923748016,
-0.08233869820833206,
-0.12992967665195465
] |
null | null |
transformers
|
<iframe src="https://ghbtns.com/github-btn.html?user=speechbrain&repo=speechbrain&type=star&count=true&size=large&v=2" frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>
<br/><br/>
# Speaker Verification with ECAPA-TDNN embeddings on Zaion
This repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain.
The system can be used to extract speaker embeddings as well.
It is trained on VoxCeleb1 + VoxCeleb2 training data.
For a better experience, we encourage you to learn more about
[SpeechBrain](https://speechbrain.github.io). The model performance on the Voxceleb1-test set (Cleaned) is:
## Pipeline description
This system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.
## Install SpeechBrain
First of all, please install SpeechBrain with the following command:
```
gh repo clone aheba/speechbrain-aheba-contribs
git checkout pretrain_new
pip install -r requirements.txt
pip install --editable .
```
Please notice that we encourage you to read our tutorials and learn more about
[SpeechBrain](https://speechbrain.github.io).
### Compute your speaker embeddings
```python
import torchaudio
import torch
from speechbrain.pretrained import Pretrained

# Load the classifier class defined in the repo's inference.py
classifier = Pretrained.import_model(source="aheba31/test-predictor",
                                     pymodule_file="inference.py",
                                     class_name="EncoderClassifier")
print(classifier.classify_file("/workspace/contributions/test/spkrec-ecapa-voxceleb/example1.wav"))
```
### Inference on GPU
To perform inference on the GPU, add `run_opts={"device":"cuda"}` when calling the `from_hparams` method.
### Training
The model was trained with SpeechBrain (aa018540).
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
```bash
git clone https://github.com/speechbrain/speechbrain/
```
2. Install it:
```
cd speechbrain
pip install -r requirements.txt
pip install -e .
```
3. Run Training:
```
cd recipes/VoxCeleb/SpeakerRec
python train_speaker_embeddings.py hparams/train_ecapa_tdnn.yaml --data_folder=your_data_folder
```
You can find our training results (models, logs, etc) [here](https://drive.google.com/drive/folders/1-ahC1xeyPinAHp2oAohL-02smNWO41Cc?usp=sharing).
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
#### Referencing ECAPA-TDNN
```
@inproceedings{DBLP:conf/interspeech/DesplanquesTD20,
author = {Brecht Desplanques and
Jenthe Thienpondt and
Kris Demuynck},
editor = {Helen Meng and
Bo Xu and
Thomas Fang Zheng},
title = {{ECAPA-TDNN:} Emphasized Channel Attention, Propagation and Aggregation
in {TDNN} Based Speaker Verification},
booktitle = {Interspeech 2020},
pages = {3830--3834},
publisher = {{ISCA}},
year = {2020},
}
```
# **Citing SpeechBrain**
Please, cite SpeechBrain if you use it for your research or business.
```bibtex
@misc{speechbrain,
title={{SpeechBrain}: A General-Purpose Speech Toolkit},
author={Mirco Ravanelli and Titouan Parcollet and Peter Plantinga and Aku Rouhe and Samuele Cornell and Loren Lugosch and Cem Subakan and Nauman Dawalatabad and Abdelwahab Heba and Jianyuan Zhong and Ju-Chieh Chou and Sung-Lin Yeh and Szu-Wei Fu and Chien-Feng Liao and Elena Rastorgueva and François Grondin and William Aris and Hwidong Na and Yan Gao and Renato De Mori and Yoshua Bengio},
year={2021},
eprint={2106.04624},
archivePrefix={arXiv},
primaryClass={eess.AS},
note={arXiv:2106.04624}
}
```
# **About SpeechBrain**
- Website: https://speechbrain.github.io/
- Code: https://github.com/speechbrain/speechbrain/
- HuggingFace: https://huggingface.co/speechbrain/
|
{"language": "en", "license": "apache-2.0", "tags": ["speechbrain", "embeddings", "Speaker", "Verification", "Identification", "pytorch", "ECAPA", "TDNN"], "datasets": ["Zaion corpus"], "metrics": ["EER"], "widget": [{"example_title": "VoxCeleb Speaker id10003", "src": "https://cdn-media.huggingface.co/speech_samples/VoxCeleb1_00003.wav"}, {"example_title": "VoxCeleb Speaker id10004", "src": "https://cdn-media.huggingface.co/speech_samples/VoxCeleb_00004.wav"}]}
| null |
aheba31/zaion-speaker-ident
|
[
"transformers",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #endpoints_compatible #region-us
|
<iframe src="URL frameborder="0" scrolling="0" width="170" height="30" title="GitHub"></iframe>
<br/><br/>
# Speaker Verification with ECAPA-TDNN embeddings on Zaion
This repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain.
The system can be used to extract speaker embeddings as well.
It is trained on Voxceleb 1+ Voxceleb2 training data.
For a better experience, we encourage you to learn more about
SpeechBrain. The model performance on Voxceleb1-test set(Cleaned) is:
## Pipeline description
This system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.
## Install SpeechBrain
First of all, please install SpeechBrain with the following command:
Please notice that we encourage you to read our tutorials and learn more about
SpeechBrain.
### Compute your speaker embeddings
### Inference on GPU
To perform inference on the GPU, add 'run_opts={"device":"cuda"}' when calling the 'from_hparams' method.
### Training
The model was trained with SpeechBrain (aa018540).
To train it from scratch, follow these steps:
1. Clone SpeechBrain:
2. Install it:
3. Run Training:
You can find our training results (models, logs, etc) here.
### Limitations
The SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.
#### Referencing ECAPA-TDNN
# Citing SpeechBrain
Please, cite SpeechBrain if you use it for your research or business.
# About SpeechBrain
- Website: URL
- Code: URL
- HuggingFace: URL
|
[
"# Speaker Verification with ECAPA-TDNN embeddings on Zaion\n\nThis repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain. \nThe system can be used to extract speaker embeddings as well. \nIt is trained on Voxceleb 1+ Voxceleb2 training data. \n\nFor a better experience, we encourage you to learn more about\nSpeechBrain. The model performance on Voxceleb1-test set(Cleaned) is:",
"## Pipeline description\n\nThis system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.",
"## Install SpeechBrain\n\nFirst of all, please install SpeechBrain with the following command:\n\n\n\nPlease notice that we encourage you to read our tutorials and learn more about\nSpeechBrain.",
"### Compute your speaker embeddings",
"### Inference on GPU\nTo perform inference on the GPU, add 'run_opts={\"device\":\"cuda\"}' when calling the 'from_hparams' method.",
"### Training\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n1. Clone SpeechBrain:\n\n2. Install it:\n\n\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.",
"### Limitations\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.",
"#### Referencing ECAPA-TDNN",
"# Citing SpeechBrain\nPlease, cite SpeechBrain if you use it for your research or business.",
"# About SpeechBrain\n- Website: URL\n- Code: URL\n- HuggingFace: URL"
] |
[
"TAGS\n#transformers #endpoints_compatible #region-us \n",
"# Speaker Verification with ECAPA-TDNN embeddings on Zaion\n\nThis repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain. \nThe system can be used to extract speaker embeddings as well. \nIt is trained on Voxceleb 1+ Voxceleb2 training data. \n\nFor a better experience, we encourage you to learn more about\nSpeechBrain. The model performance on Voxceleb1-test set(Cleaned) is:",
"## Pipeline description\n\nThis system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.",
"## Install SpeechBrain\n\nFirst of all, please install SpeechBrain with the following command:\n\n\n\nPlease notice that we encourage you to read our tutorials and learn more about\nSpeechBrain.",
"### Compute your speaker embeddings",
"### Inference on GPU\nTo perform inference on the GPU, add 'run_opts={\"device\":\"cuda\"}' when calling the 'from_hparams' method.",
"### Training\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n1. Clone SpeechBrain:\n\n2. Install it:\n\n\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.",
"### Limitations\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.",
"#### Referencing ECAPA-TDNN",
"# Citing SpeechBrain\nPlease, cite SpeechBrain if you use it for your research or business.",
"# About SpeechBrain\n- Website: URL\n- Code: URL\n- HuggingFace: URL"
] |
[
17,
115,
81,
38,
10,
46,
61,
31,
10,
22,
20
] |
[
"passage: TAGS\n#transformers #endpoints_compatible #region-us \n# Speaker Verification with ECAPA-TDNN embeddings on Zaion\n\nThis repository provides all the necessary tools to perform speaker verification with a pretrained ECAPA-TDNN model using SpeechBrain. \nThe system can be used to extract speaker embeddings as well. \nIt is trained on Voxceleb 1+ Voxceleb2 training data. \n\nFor a better experience, we encourage you to learn more about\nSpeechBrain. The model performance on Voxceleb1-test set(Cleaned) is:## Pipeline description\n\nThis system is composed of an ECAPA-TDNN model. It is a combination of convolutional and residual blocks. The embeddings are extracted using attentive statistical pooling. The system is trained with Additive Margin Softmax Loss. Speaker Verification is performed using cosine distance between speaker embeddings.## Install SpeechBrain\n\nFirst of all, please install SpeechBrain with the following command:\n\n\n\nPlease notice that we encourage you to read our tutorials and learn more about\nSpeechBrain.### Compute your speaker embeddings### Inference on GPU\nTo perform inference on the GPU, add 'run_opts={\"device\":\"cuda\"}' when calling the 'from_hparams' method.### Training\nThe model was trained with SpeechBrain (aa018540).\nTo train it from scratch follows these steps:\n1. Clone SpeechBrain:\n\n2. Install it:\n\n\n3. Run Training:\n\n\nYou can find our training results (models, logs, etc) here.### Limitations\nThe SpeechBrain team does not provide any warranty on the performance achieved by this model when used on other datasets.#### Referencing ECAPA-TDNN# Citing SpeechBrain\nPlease, cite SpeechBrain if you use it for your research or business.# About SpeechBrain\n- Website: URL\n- Code: URL\n- HuggingFace: URL"
] |
[
-0.058879826217889786,
0.12495332956314087,
-0.004507667385041714,
0.04448652267456055,
0.09033125638961792,
0.014196711592376232,
0.011300378479063511,
0.0826578438282013,
0.05859934166073799,
0.08821789920330048,
0.026424894109368324,
0.010212610475718975,
0.0954068973660469,
0.05162704735994339,
0.11484463512897491,
-0.10935817658901215,
0.025877585634589195,
-0.055531516671180725,
0.130276158452034,
0.04862048849463463,
0.07467516511678696,
-0.06499055027961731,
0.06476438045501709,
-0.020559143275022507,
-0.11605983972549438,
-0.001605896046385169,
-0.02107327990233898,
-0.03371646627783775,
0.09551932662725449,
-0.0055946907959878445,
0.017102569341659546,
-0.026258716359734535,
0.07342617213726044,
-0.2154073864221573,
0.01566687971353531,
0.07451053708791733,
0.007240565028041601,
0.053027860820293427,
0.05077039822936058,
-0.005241958424448967,
0.11081909388303757,
0.007616260554641485,
0.07158008962869644,
0.09474610537290573,
-0.09897788614034653,
-0.019289109855890274,
-0.10522539168596268,
0.06287740916013718,
0.113301120698452,
0.06528151035308838,
-0.04052790626883507,
0.06238174811005592,
-0.005946462973952293,
0.06351497769355774,
0.07233124226331711,
-0.1856049746274948,
0.007010551635175943,
-0.01692270301282406,
0.10698016732931137,
0.008697612211108208,
-0.10165824741125107,
0.012344644404947758,
0.009542037732899189,
0.04715657979249954,
0.03539936617016792,
-0.034874942153692245,
0.019942138344049454,
-0.09664970636367798,
-0.12709768116474152,
-0.018711039796471596,
0.14758481085300446,
-0.003141667926684022,
-0.08777535706758499,
-0.1259801685810089,
-0.0033143956679850817,
-0.10210905224084854,
0.0203108973801136,
0.0019944587256759405,
0.0223368089646101,
0.015584863722324371,
0.09147796779870987,
-0.12861894071102142,
-0.08358515053987503,
-0.07121127098798752,
-0.03796296566724777,
0.16077175736427307,
0.027904408052563667,
0.003945047967135906,
0.08723820745944977,
0.11915697902441025,
-0.03829840198159218,
-0.023612888529896736,
-0.07209512591362,
-0.005386977456510067,
-0.1302158385515213,
-0.06000474467873573,
0.008062098175287247,
-0.08142479509115219,
-0.003383152186870575,
0.21042916178703308,
-0.03358316421508789,
0.043919697403907776,
-0.024765435606241226,
0.01673729531466961,
0.02321077324450016,
0.09592790901660919,
-0.023624079301953316,
-0.041212569922208786,
0.040727730840444565,
-0.06384826451539993,
0.03759279474616051,
-0.014898495748639107,
-0.04338698461651802,
0.012799475342035294,
-0.024031654000282288,
0.07355998456478119,
0.04060641676187515,
0.01692388392984867,
-0.061370015144348145,
-0.014229629188776016,
0.13464845716953278,
-0.1206543892621994,
0.06610015779733658,
0.0643124133348465,
-0.007674854248762131,
-0.024805009365081787,
0.10536972433328629,
0.025763025507330894,
-0.1551361382007599,
-0.015122600831091404,
-0.056834880262613297,
0.01620096154510975,
-0.037179723381996155,
-0.026140833273530006,
0.05993935465812683,
-0.05561069771647453,
-0.07603184133768082,
-0.11918280273675919,
-0.05279446765780449,
-0.05373968556523323,
-0.028675446286797523,
-0.04358924925327301,
0.018478043377399445,
-0.060313522815704346,
0.03509139269590378,
0.0152592109516263,
-0.0013599470257759094,
0.009820373728871346,
-0.01705224998295307,
-0.014005128294229507,
0.019881678745150566,
0.036422498524188995,
0.005830045789480209,
0.021838655695319176,
-0.037215325981378555,
0.016686413437128067,
-0.10413569957017899,
0.12410683929920197,
-0.03470654785633087,
0.04428083822131157,
-0.11623638868331909,
0.04173171892762184,
-0.06316976994276047,
0.01830330491065979,
0.09032910317182541,
0.18634968996047974,
-0.11934822052717209,
0.0010570335434749722,
0.16623777151107788,
-0.11276870220899582,
-0.040432609617710114,
0.08729364722967148,
0.0020995947998017073,
0.0892048329114914,
0.0754815861582756,
0.09039183706045151,
-0.05230935662984848,
-0.19130761921405792,
-0.1052042692899704,
-0.06981611996889114,
-0.06582950800657272,
0.02785559371113777,
0.05773913115262985,
-0.12506863474845886,
0.11976078152656555,
0.025630807504057884,
0.03677913919091225,
0.026237256824970245,
0.02499302104115486,
-0.014386193826794624,
-0.01983669400215149,
-0.07775391638278961,
-0.0248024370521307,
-0.03676914423704147,
-0.030846353620290756,
0.018402235582470894,
-0.13067726790905,
-0.008379318751394749,
0.11301960796117783,
-0.08749133348464966,
0.07516594976186752,
-0.09883788228034973,
0.05291230231523514,
-0.07986923307180405,
0.023025495931506157,
-0.09259026497602463,
-0.00798751600086689,
0.0776018500328064,
-0.015160405077040195,
0.15190427005290985,
-0.17129452526569366,
0.02852475270628929,
0.04551292955875397,
-0.0345655120909214,
-0.013795378617942333,
0.03442873805761337,
0.03798387572169304,
0.00724486168473959,
-0.1127072274684906,
-0.03401346877217293,
-0.04698776826262474,
0.17324040830135345,
-0.15912437438964844,
-0.0009435615502297878,
0.09407907724380493,
0.16221673786640167,
-0.0025404717307537794,
-0.09718763828277588,
0.021710645407438278,
-0.009990434162318707,
-0.010123358108103275,
-0.058896005153656006,
0.0019687185995280743,
0.02507486194372177,
-0.03702874481678009,
0.07120835781097412,
-0.1869865506887436,
-0.2589934468269348,
0.07911082357168198,
-0.005469517782330513,
-0.11431426554918289,
-0.032630693167448044,
-0.002071276307106018,
-0.07684124261140823,
-0.03907794505357742,
-0.1219511553645134,
0.25430047512054443,
0.05672024190425873,
0.07763306051492691,
-0.025588441640138626,
-0.05339676886796951,
0.004227073397487402,
-0.010164938867092133,
-0.016474299132823944,
0.010436582379043102,
0.11190193146467209,
-0.0618390217423439,
0.01595786027610302,
-0.06370805203914642,
-0.028556887060403824,
0.04685978218913078,
0.0361047126352787,
-0.10762377828359604,
-0.06801807880401611,
0.02175234630703926,
0.03263970464468002,
0.11371120810508728,
-0.10135313868522644,
-0.04863433167338371,
-0.008520171977579594,
-0.04379232972860336,
0.04501694068312645,
-0.06578051298856735,
0.09420350193977356,
-0.01900128461420536,
-0.010580976493656635,
0.019079307094216347,
-0.05944804102182388,
-0.06664710491895676,
0.046706411987543106,
-0.028764713555574417,
0.04084686562418938,
-0.015080510638654232,
-0.024823077023029327,
-0.1526821106672287,
0.0928032174706459,
-0.12294107675552368,
-0.23730935156345367,
-0.16407233476638794,
-0.013263161294162273,
-0.01972120627760887,
0.043416451662778854,
0.015367389656603336,
-0.028444446623325348,
-0.06309957802295685,
-0.10331370681524277,
0.030929522588849068,
0.024279478937387466,
-0.08089418709278107,
-0.07743634283542633,
-0.02688395231962204,
0.025567200034856796,
-0.10207011550664902,
0.01096324436366558,
0.02701951563358307,
-0.021121157333254814,
-0.00794434268027544,
0.08011221140623093,
0.015209606848657131,
0.16633744537830353,
0.040910378098487854,
-0.02865414507687092,
-0.02690466120839119,
0.12165865302085876,
-0.13709695637226105,
0.132263645529747,
0.011097372509539127,
-0.08220206946134567,
0.0035573909990489483,
0.12397663295269012,
0.02076217532157898,
-0.060751888900995255,
0.02653595246374607,
-0.02982088178396225,
-0.039948511868715286,
-0.21505087614059448,
-0.08387380838394165,
-0.07457156479358673,
-0.047302160412073135,
0.11601601541042328,
0.06044555455446243,
0.032851215451955795,
-0.0058984821662306786,
-0.04071540758013725,
-0.05592409893870354,
0.06872566789388657,
0.10614193975925446,
0.11929062753915787,
-0.04126964509487152,
0.04244793951511383,
-0.059176526963710785,
0.0024999852757900953,
0.04340547323226929,
0.0864274725317955,
0.1101943776011467,
-0.012745260260999203,
0.2660098969936371,
0.011877024546265602,
0.026250455528497696,
0.0003569499822333455,
0.09923383593559265,
0.00480171013623476,
0.05953074246644974,
0.03973850980401039,
-0.06314507871866226,
-0.046465326100587845,
0.09970276057720184,
0.17624841630458832,
-0.05919473245739937,
0.003456933656707406,
-0.115587018430233,
0.025876346975564957,
0.21829752624034882,
0.046775270253419876,
-0.15576408803462982,
-0.05884157866239548,
-0.008362012915313244,
-0.0887511670589447,
-0.06312689930200577,
-0.0008359347702935338,
0.09449028968811035,
-0.0893058031797409,
0.04361385479569435,
0.034584444016218185,
0.054541267454624176,
-0.0845145732164383,
-0.06765809655189514,
0.008465878665447235,
0.07202860713005066,
-0.01028372347354889,
0.019684158265590668,
-0.1748049259185791,
-0.0035492314491420984,
0.0013791414676234126,
0.11043453216552734,
-0.01026085764169693,
0.07175392657518387,
-0.009390287101268768,
-0.013147111050784588,
0.156432643532753,
0.012127758003771305,
-0.07169069349765778,
-0.06275664269924164,
-0.13045993447303772,
-0.02072700299322605,
0.021751148626208305,
-0.05063310265541077,
0.042827002704143524,
-0.0006992715643718839,
-0.03937515616416931,
-0.012317405082285404,
-0.10229919850826263,
-0.09961124509572983,
-0.16445906460285187,
0.08679045736789703,
0.002161311684176326,
0.09197787195444107,
0.00206604297272861,
-0.024711772799491882,
-0.06085461005568504,
0.14445987343788147,
-0.17631849646568298,
-0.06288544088602066,
-0.06513337045907974,
-0.11386580020189285,
0.12866507470607758,
-0.01225678063929081,
0.10779181122779846,
0.01852354407310486,
0.09730285406112671,
-0.04741024598479271,
0.022354580461978912,
0.029395993798971176,
-0.05379703268408775,
-0.08434233069419861,
-0.03536351025104523,
0.20561616122722626,
0.04302479326725006,
0.031525980681180954,
0.040258556604385376,
0.08437420427799225,
0.006635293364524841,
-0.06395445764064789,
0.0406210832297802,
0.12333335727453232,
-0.03997514769434929,
0.04371940344572067,
-0.08319135010242462,
-0.07533521205186844,
-0.12947650253772736,
-0.08953765034675598,
0.12234820425510406,
0.25094395875930786,
-0.07897847890853882,
0.11843845993280411,
0.11623451858758926,
-0.17029722034931183,
-0.1719435453414917,
-0.06457876414060593,
0.01086109783500433,
-0.004352944903075695,
-0.05261439457535744,
-0.1546863317489624,
0.10022599250078201,
0.01881232298910618,
-0.019566841423511505,
0.14310944080352783,
-0.11474025249481201,
-0.13695910573005676,
0.11488140374422073,
0.0070835305377841,
-0.07383155077695847,
-0.0687326118350029,
-0.05142306163907051,
-0.002092805691063404,
0.010814618319272995,
0.12683075666427612,
-0.009726854041218758,
0.09415297955274582,
0.04795398935675621,
0.04079456254839897,
0.035749729722738266,
-0.057063028216362,
0.06322137266397476,
0.013214739970862865,
-0.03667936101555824,
-0.038377370685338974,
0.00702260946854949,
-0.02560449205338955,
-0.04835037887096405,
0.15237317979335785,
0.039307087659835815,
-0.013316253200173378,
-0.03425583988428116,
-0.04980500787496567,
-0.04298830404877663,
0.09934187680482864,
-0.058791592717170715,
-0.0036967634223401546,
0.0006089594680815935,
0.0385926179587841,
0.05347349867224693,
0.027483247220516205,
-0.10780374705791473,
-0.09055141359567642,
-0.044649604707956314,
0.2571380138397217,
0.13963352143764496,
-0.006778295151889324,
-0.07902469485998154,
0.03378813713788986,
-0.0667487159371376,
0.03315556421875954,
0.04121687635779381,
0.05203710123896599,
0.08729652315378189,
0.04730330407619476,
0.11161138862371445,
-0.012836150825023651,
-0.1783531755208969,
-0.0370466448366642,
0.06443535536527634,
-0.06624814122915268,
-0.11741451174020767,
0.01965046115219593,
-0.006803365424275398,
-0.01840864308178425,
-0.0210280679166317,
0.16999289393424988,
-0.039109665900468826,
-0.03734070062637329,
0.05250891298055649,
0.06754098832607269,
-0.08159496635198593,
0.10160861164331436,
0.008280937559902668,
0.0067309229634702206,
-0.061481840908527374,
0.1366766095161438,
0.13799762725830078,
-0.10375146567821503,
0.05513198301196098,
0.14019237458705902,
-0.04264010488986969,
-0.07381432503461838,
-0.19028866291046143,
-0.06150005757808685,
-0.022915273904800415,
-0.05328190326690674,
0.019147519022226334,
-0.021789677441120148,
-0.02056342363357544,
0.055633965879678726,
-0.038166917860507965,
0.05895749852061272,
-0.00774178234860301,
0.07313898950815201,
-0.13951417803764343,
0.07223760336637497,
0.02020157314836979,
0.03059820830821991,
-0.01684449426829815,
0.13380888104438782,
0.05524185299873352,
-0.023851148784160614,
-0.01720315031707287,
-0.06426262855529785,
-0.055745333433151245,
0.01928449049592018,
0.0030711612198501825,
0.03523116931319237,
-0.0631580650806427,
-0.0026379686314612627,
0.050569139420986176,
0.021887443959712982,
0.010794827714562416,
0.05375916510820389,
-0.00963522307574749,
-0.03130187839269638,
-0.01863466389477253,
0.053631242364645004,
-0.11269210278987885,
-0.018383270129561424,
0.050432514399290085,
-0.07471653819084167,
0.10632435232400894,
0.0643346905708313,
-0.03964308649301529,
0.0002682947088032961,
-0.07482447475194931,
0.06489746272563934,
0.0006553450948558748,
-0.0027176460716873407,
-0.04639577493071556,
-0.2004001885652542,
0.023474421352148056,
0.02086978405714035,
-0.014279024675488472,
-0.04733426123857498,
0.06523498892784119,
-0.07260062545537949,
-0.015739470720291138,
0.035220399498939514,
-0.04398375377058983,
-0.06156963109970093,
0.011060191318392754,
-0.01581036113202572,
0.036492373794317245,
0.07212376594543457,
-0.04633153975009918,
0.07059872150421143,
-0.052372366189956665,
0.03689803555607796,
0.003172466764226556,
0.03530073165893555,
-0.0039215111173689365,
-0.009165286086499691,
0.05039801076054573,
-0.013126501813530922,
0.16637548804283142,
-0.09478232264518738,
-0.017859911546111107,
0.04630628600716591,
0.010590704157948494,
-0.17016342282295227,
0.07009764015674591,
0.07398080825805664,
-0.03198324888944626,
-0.019224610179662704,
-0.08028092235326767,
0.006170650944113731,
0.011051024310290813,
-0.003331103827804327,
-0.011025928892195225,
0.06548153609037399,
0.10207892954349518,
0.056918397545814514,
0.036937735974788666,
-0.10752297937870026,
-0.01444219145923853,
0.06783436238765717,
-0.07672162353992462,
0.004126226995140314,
-0.11107974499464035,
0.16215312480926514,
0.09064877778291702,
-0.12233700603246689,
0.06921608746051788,
0.008156831376254559,
-0.07567291706800461,
-0.03069636970758438,
-0.12633000314235687,
-0.0014505945146083832,
-0.020988671109080315,
-0.006243193056434393,
-0.04906342923641205,
0.006505676545202732,
-0.010708889923989773,
0.01514960452914238,
-0.04087099805474281,
0.15992780029773712,
-0.06422854959964752,
-0.09647425264120102,
0.05284179374575615,
0.017530540004372597,
0.03305273875594139,
-0.014439063146710396,
0.040617648512125015,
0.020386680960655212,
0.05398579686880112,
0.11582011729478836,
0.07722436636686325,
0.042141806334257126,
0.06264308840036392,
0.005891693755984306,
-0.024171218276023865,
0.024174919351935387,
-0.008524276316165924,
-0.032261256128549576,
0.19305016100406647,
0.07209964841604233,
-0.022735832259058952,
-0.005800662562251091,
0.08951487392187119,
-0.05043715983629227,
-0.0702591985464096,
-0.14324934780597687,
0.11106284707784653,
-0.003385058371350169,
-0.01667102240025997,
-0.041380539536476135,
-0.1392441987991333,
-0.02831382490694523,
0.16569514572620392,
0.14427323639392853,
0.0014047215227037668,
-0.0005014322232455015,
0.012873353436589241,
0.010435155592858791,
0.009476911276578903,
0.05072012171149254,
0.01244462002068758,
0.1706896275281906,
-0.01996796578168869,
0.10519266128540039,
0.027768246829509735,
-0.004162073601037264,
-0.03259536996483803,
0.0696219801902771,
-0.1229095533490181,
0.04298647493124008,
-0.053984906524419785,
0.014841416850686073,
-0.09759233146905899,
-0.2744225561618805,
-0.09936686605215073,
-0.0422082357108593,
-0.03648868948221207,
0.04541400447487831,
-0.07188257575035095,
0.005462223663926125,
-4.5756102906580054e-8,
0.05380382761359215,
-0.03071802854537964,
0.2347078025341034,
0.0025042498018592596,
-0.05622108653187752,
-0.033342909067869186,
0.03940759599208832,
-0.1581934541463852,
0.2395869493484497,
-0.04080367088317871,
0.10090499371290207,
0.043420493602752686,
0.03705544397234917,
-0.1339649111032486,
0.028223706409335136,
-0.020603137090802193,
-0.10910363495349884,
0.07101573795080185,
0.21510474383831024,
0.00855722464621067,
0.13630712032318115,
-0.01402377337217331,
0.0044640484265983105,
0.05861338600516319,
-0.013234159909188747,
0.05957071855664253,
-0.06987711787223816,
0.07821087539196014,
-0.09168048202991486,
0.17790795862674713,
0.04474356025457382,
-0.0381748303771019,
-0.020744092762470245,
-0.04861894249916077,
-0.02696971409022808,
0.03241357207298279,
0.09192594885826111,
0.008305752649903297,
-0.21605737507343292,
0.05983046814799309,
-0.08551108092069626,
0.05281193554401398,
-0.24153584241867065,
-0.06851602345705032,
-0.01005643606185913,
-0.03893318772315979,
-0.052154626697301865,
0.09951377660036087,
0.06880195438861847,
0.029461050406098366,
-0.070412777364254,
-0.08468343317508698,
0.03280722349882126,
0.09195131808519363,
-0.10144379734992981,
-0.09464450925588608
] |
null | null |
transformers
|
### FinancialBERT for Sentiment Analysis
[*FinancialBERT*](https://huggingface.co/ahmedrachid/FinancialBERT) is a BERT model pre-trained on a large corpus of financial texts. Its purpose is to advance financial NLP research and practice, so that practitioners and researchers in the financial domain can benefit from the model without needing the significant computational resources required to train it from scratch.
The model was fine-tuned for the sentiment analysis task on the _Financial PhraseBank_ dataset. Experiments show that it outperforms the general-domain BERT and other financial domain-specific models.
More details on `FinancialBERT`'s pre-training process can be found at: https://www.researchgate.net/publication/358284785_FinancialBERT_-_A_Pretrained_Language_Model_for_Financial_Text_Mining
### Training data
The FinancialBERT model was fine-tuned on [Financial PhraseBank](https://www.researchgate.net/publication/251231364_FinancialPhraseBank-v10), a dataset of 4,840 financial news sentences categorised by sentiment (negative, neutral, positive).
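As a rough sketch, the dataset can be loaded from the Hugging Face Hub with the `datasets` library; the configuration name used below is an assumption about which agreement-level subset matches the 4,840-sentence figure.
```python
from datasets import load_dataset

# "sentences_50agree" is an assumed config; Financial PhraseBank also ships
# sentences_66agree, sentences_75agree and sentences_allagree subsets.
dataset = load_dataset("financial_phrasebank", "sentences_50agree")
print(dataset["train"][0])  # {'sentence': '...', 'label': 0|1|2}
```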
### Fine-tuning hyper-parameters
- learning_rate = 2e-5
- batch_size = 32
- max_seq_length = 512
- num_train_epochs = 5
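A minimal sketch of how these hyper-parameters could be wired into a Hugging Face `Trainer` run is shown below; it is not the authors' exact training script, and the dataset configuration and tokenization details are assumptions.
```python
from datasets import load_dataset
from transformers import (BertForSequenceClassification, BertTokenizer,
                          Trainer, TrainingArguments)

tokenizer = BertTokenizer.from_pretrained("ahmedrachid/FinancialBERT")
model = BertForSequenceClassification.from_pretrained(
    "ahmedrachid/FinancialBERT", num_labels=3)

# Assumed data preparation: tokenize Financial PhraseBank sentences
raw = load_dataset("financial_phrasebank", "sentences_50agree")

def tokenize(batch):
    return tokenizer(batch["sentence"], truncation=True, max_length=512)

tokenized = raw.map(tokenize, batched=True)

args = TrainingArguments(
    output_dir="financialbert-sentiment",
    learning_rate=2e-5,
    per_device_train_batch_size=32,
    num_train_epochs=5,
)

trainer = Trainer(model=model, args=args,
                  train_dataset=tokenized["train"], tokenizer=tokenizer)
trainer.train()
```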
### Evaluation metrics
The evaluation metrics used are: Precision, Recall and F1-score. The following is the classification report on the test set.
| sentiment | precision | recall | f1-score | support |
| ------------- |:-------------:|:-------------:|:-------------:| -----:|
| negative | 0.96 | 0.97 | 0.97 | 58 |
| neutral | 0.98 | 0.99 | 0.98 | 279 |
| positive | 0.98 | 0.97 | 0.97 | 148 |
| macro avg | 0.97 | 0.98 | 0.98 | 485 |
| weighted avg | 0.98 | 0.98 | 0.98 | 485 |
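The same kind of report can be reproduced on any held-out split with scikit-learn; `y_true` and `y_pred` below are placeholder labels and predictions, not the actual test-set outputs.
```python
from sklearn.metrics import classification_report

# Placeholder labels/predictions; 0 = negative, 1 = neutral, 2 = positive
y_true = [0, 1, 2, 1, 1, 2]
y_pred = [0, 1, 2, 1, 0, 2]

print(classification_report(
    y_true, y_pred, target_names=["negative", "neutral", "positive"]))
```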
### How to use
The model can be used through the Transformers `pipeline` API for sentiment analysis.
```python
from transformers import BertTokenizer, BertForSequenceClassification
from transformers import pipeline
model = BertForSequenceClassification.from_pretrained("ahmedrachid/FinancialBERT-Sentiment-Analysis",num_labels=3)
tokenizer = BertTokenizer.from_pretrained("ahmedrachid/FinancialBERT-Sentiment-Analysis")
nlp = pipeline("sentiment-analysis", model=model, tokenizer=tokenizer)
sentences = ["Operating profit rose to EUR 13.1 mn from EUR 8.7 mn in the corresponding period in 2007 representing 7.7 % of net sales.",
"Bids or offers include at least 1,000 shares and the value of the shares must correspond to at least EUR 4,000.",
"Raute reported a loss per share of EUR 0.86 for the first half of 2009 , against EPS of EUR 0.74 in the corresponding period of 2008.",
]
results = nlp(sentences)
print(results)
# expected output:
# [{'label': 'positive', 'score': 0.9998133778572083},
#  {'label': 'neutral', 'score': 0.9997822642326355},
#  {'label': 'negative', 'score': 0.9877365231513977}]
```
> Created by [Ahmed Rachid Hazourli](https://www.linkedin.com/in/ahmed-rachid/)
|
{"language": "en", "tags": ["financial-sentiment-analysis", "sentiment-analysis"], "datasets": ["financial_phrasebank"], "widget": [{"text": "Operating profit rose to EUR 13.1 mn from EUR 8.7 mn in the corresponding period in 2007 representing 7.7 % of net sales."}, {"text": "Bids or offers include at least 1,000 shares and the value of the shares must correspond to at least EUR 4,000."}, {"text": "Raute reported a loss per share of EUR 0.86 for the first half of 2009 , against EPS of EUR 0.74 in the corresponding period of 2008."}]}
|
text-classification
|
ahmedrachid/FinancialBERT-Sentiment-Analysis
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"financial-sentiment-analysis",
"sentiment-analysis",
"en",
"dataset:financial_phrasebank",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #text-classification #financial-sentiment-analysis #sentiment-analysis #en #dataset-financial_phrasebank #autotrain_compatible #endpoints_compatible #has_space #region-us
|
### FinancialBERT for Sentiment Analysis
*FinancialBERT* is a BERT model pre-trained on a large corpora of financial texts. The purpose is to enhance financial NLP research and practice in financial domain, hoping that financial practitioners and researchers can benefit from this model without the necessity of the significant computational resources required to train the model.
The model was fine-tuned for Sentiment Analysis task on *Financial PhraseBank* dataset. Experiments show that this model outperforms the general BERT and other financial domain-specific models.
More details on 'FinancialBERT''s pre-training process can be found at: URL
### Training data
FinancialBERT model was fine-tuned on Financial PhraseBank, a dataset consisting of 4840 Financial News categorised by sentiment (negative, neutral, positive).
### Fine-tuning hyper-parameters
* learning\_rate = 2e-5
* batch\_size = 32
* max\_seq\_length = 512
* num\_train\_epochs = 5
### Evaluation metrics
The evaluation metrics used are: Precision, Recall and F1-score. The following is the classification report on the test set.
### How to use
The model can be used thanks to Transformers pipeline for sentiment analysis.
>
> Created by Ahmed Rachid Hazourli
>
>
>
|
[
"### FinancialBERT for Sentiment Analysis\n\n\n*FinancialBERT* is a BERT model pre-trained on a large corpora of financial texts. The purpose is to enhance financial NLP research and practice in financial domain, hoping that financial practitioners and researchers can benefit from this model without the necessity of the significant computational resources required to train the model.\n\n\nThe model was fine-tuned for Sentiment Analysis task on *Financial PhraseBank* dataset. Experiments show that this model outperforms the general BERT and other financial domain-specific models.\n\n\nMore details on 'FinancialBERT''s pre-training process can be found at: URL",
"### Training data\n\n\nFinancialBERT model was fine-tuned on Financial PhraseBank, a dataset consisting of 4840 Financial News categorised by sentiment (negative, neutral, positive).",
"### Fine-tuning hyper-parameters\n\n\n* learning\\_rate = 2e-5\n* batch\\_size = 32\n* max\\_seq\\_length = 512\n* num\\_train\\_epochs = 5",
"### Evaluation metrics\n\n\nThe evaluation metrics used are: Precision, Recall and F1-score. The following is the classification report on the test set.",
"### How to use\n\n\nThe model can be used thanks to Transformers pipeline for sentiment analysis.\n\n\n\n> \n> Created by Ahmed Rachid Hazourli\n> \n> \n>"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #financial-sentiment-analysis #sentiment-analysis #en #dataset-financial_phrasebank #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"### FinancialBERT for Sentiment Analysis\n\n\n*FinancialBERT* is a BERT model pre-trained on a large corpora of financial texts. The purpose is to enhance financial NLP research and practice in financial domain, hoping that financial practitioners and researchers can benefit from this model without the necessity of the significant computational resources required to train the model.\n\n\nThe model was fine-tuned for Sentiment Analysis task on *Financial PhraseBank* dataset. Experiments show that this model outperforms the general BERT and other financial domain-specific models.\n\n\nMore details on 'FinancialBERT''s pre-training process can be found at: URL",
"### Training data\n\n\nFinancialBERT model was fine-tuned on Financial PhraseBank, a dataset consisting of 4840 Financial News categorised by sentiment (negative, neutral, positive).",
"### Fine-tuning hyper-parameters\n\n\n* learning\\_rate = 2e-5\n* batch\\_size = 32\n* max\\_seq\\_length = 512\n* num\\_train\\_epochs = 5",
"### Evaluation metrics\n\n\nThe evaluation metrics used are: Precision, Recall and F1-score. The following is the classification report on the test set.",
"### How to use\n\n\nThe model can be used thanks to Transformers pipeline for sentiment analysis.\n\n\n\n> \n> Created by Ahmed Rachid Hazourli\n> \n> \n>"
] |
[
67,
147,
41,
53,
36,
35
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #financial-sentiment-analysis #sentiment-analysis #en #dataset-financial_phrasebank #autotrain_compatible #endpoints_compatible #has_space #region-us \n### FinancialBERT for Sentiment Analysis\n\n\n*FinancialBERT* is a BERT model pre-trained on a large corpora of financial texts. The purpose is to enhance financial NLP research and practice in financial domain, hoping that financial practitioners and researchers can benefit from this model without the necessity of the significant computational resources required to train the model.\n\n\nThe model was fine-tuned for Sentiment Analysis task on *Financial PhraseBank* dataset. Experiments show that this model outperforms the general BERT and other financial domain-specific models.\n\n\nMore details on 'FinancialBERT''s pre-training process can be found at: URL### Training data\n\n\nFinancialBERT model was fine-tuned on Financial PhraseBank, a dataset consisting of 4840 Financial News categorised by sentiment (negative, neutral, positive).### Fine-tuning hyper-parameters\n\n\n* learning\\_rate = 2e-5\n* batch\\_size = 32\n* max\\_seq\\_length = 512\n* num\\_train\\_epochs = 5### Evaluation metrics\n\n\nThe evaluation metrics used are: Precision, Recall and F1-score. The following is the classification report on the test set.### How to use\n\n\nThe model can be used thanks to Transformers pipeline for sentiment analysis.\n\n\n\n> \n> Created by Ahmed Rachid Hazourli\n> \n> \n>"
] |
[
-0.0694074034690857,
0.1514381319284439,
-0.001819823170080781,
0.05015524849295616,
0.08202318102121353,
0.009440752677619457,
0.06573265045881271,
0.08855360001325607,
0.007574940100312233,
0.12017220258712769,
0.023754093796014786,
-0.0874270647764206,
0.08345470577478409,
0.16116274893283844,
0.07453411817550659,
-0.2526785433292389,
0.003627476515248418,
-0.054162174463272095,
0.03921069949865341,
0.08744268864393234,
0.12533549964427948,
-0.11138821393251419,
0.08458291739225388,
-0.007271989248692989,
-0.0866054818034172,
0.020955422893166542,
-0.007473234087228775,
-0.002005873480811715,
0.05711698904633522,
0.11023779958486557,
0.09936012327671051,
-0.007698065601289272,
0.06201498955488205,
-0.20385098457336426,
0.012500351294875145,
0.026803432032465935,
-0.027596773579716682,
0.04044278711080551,
0.06826921552419662,
0.02264530584216118,
0.21888159215450287,
-0.05609770864248276,
0.06598764657974243,
0.05771361291408539,
-0.06985548883676529,
-0.042997173964977264,
-0.11860547214746475,
0.09495241194963455,
0.07453954219818115,
0.1037655621767044,
-0.05105812847614288,
0.03141332417726517,
-0.1302081197500229,
0.00044973986223340034,
0.04166017100214958,
-0.1782810389995575,
-0.0274811964482069,
0.05211072787642479,
-0.028827300295233727,
0.0332295298576355,
-0.12456107884645462,
-0.011468625627458096,
0.030618393793702126,
0.015743544325232506,
0.11301854997873306,
-0.02925720252096653,
0.0028122244402766228,
0.007958907634019852,
-0.17604616284370422,
-0.05174659565091133,
0.08972826600074768,
0.10828880965709686,
-0.062410321086645126,
-0.15619513392448425,
0.013003882952034473,
0.033566150814294815,
-0.013041632249951363,
-0.06036015972495079,
0.025195244699716568,
-0.012752923183143139,
0.010533332824707031,
-0.05269165709614754,
-0.08587906509637833,
0.008525238372385502,
-0.051778387278318405,
-0.006884539034217596,
0.0016169310547411442,
-0.022640621289610863,
0.017749713733792305,
0.0789109468460083,
-0.1950591802597046,
-0.07726341485977173,
-0.02804570272564888,
-0.01466749981045723,
-0.030406558886170387,
-0.026768764480948448,
-0.028862658888101578,
0.014497603289783001,
-0.021814940497279167,
0.07670479267835617,
-0.04978593438863754,
0.0021987236104905605,
-0.024211281910538673,
-0.027792757377028465,
0.01986074633896351,
0.12375562638044357,
-0.1349511742591858,
-0.12605012953281403,
-0.03459669649600983,
0.04920588433742523,
-0.01301680225878954,
0.028828317299485207,
-0.02774040773510933,
0.10054677724838257,
0.0057218377478420734,
0.03370600938796997,
-0.0430493988096714,
-0.0021006965544074774,
-0.05018804967403412,
-0.016170954331755638,
0.11649336665868759,
-0.11736828088760376,
0.06254899501800537,
0.027265189215540886,
-0.07944586873054504,
-0.008220910094678402,
-0.030616026371717453,
-0.018801862373948097,
-0.023537548258900642,
0.09607004374265671,
-0.08577132225036621,
-0.00944392941892147,
-0.10429520159959793,
-0.13533301651477814,
0.06531469523906708,
-0.011888398788869381,
0.01604360155761242,
-0.08430219441652298,
-0.10686899721622467,
-0.04830476641654968,
-0.0035023794043809175,
-0.08430545032024384,
-0.024335339665412903,
-0.026379350572824478,
-0.06841742247343063,
0.009808625094592571,
-0.00659303180873394,
0.1512479931116104,
-0.0021099781151860952,
-0.007268127519637346,
-0.03463180363178253,
0.02120509184896946,
0.07640281319618225,
-0.0037808632478117943,
-0.0877225399017334,
-0.008209332823753357,
-0.11145441979169846,
0.09693033248186111,
-0.09776799380779266,
-0.009968121536076069,
-0.11852668970823288,
-0.03614279627799988,
-0.058062173426151276,
0.02947065979242325,
0.031269676983356476,
0.11350268125534058,
-0.16469189524650574,
-0.052167486399412155,
0.13814906775951385,
-0.0964217260479927,
-0.005722912959754467,
0.1355675309896469,
-0.07714102417230606,
0.09577123820781708,
0.09770844876766205,
0.1395803987979889,
0.20362985134124756,
-0.1251526176929474,
-0.1126587986946106,
0.021867601200938225,
0.0380919873714447,
0.11295539885759354,
0.07285597175359726,
-0.01582998037338257,
-0.015521306544542313,
0.01144395861774683,
-0.09756530076265335,
0.023282203823328018,
-0.06513240933418274,
-0.04557599499821663,
0.016689665615558624,
-0.07673154771327972,
0.00819486565887928,
0.012212265282869339,
0.0004620187100954354,
-0.03933174908161163,
-0.15796206891536713,
0.0458759069442749,
0.08014090359210968,
-0.013062207959592342,
0.011929081752896309,
-0.13043375313282013,
0.0837876945734024,
0.0012368671596050262,
-0.0033690936397761106,
-0.13104550540447235,
0.003107141936197877,
0.039885494858026505,
-0.08396250009536743,
0.09339424967765808,
0.062950000166893,
0.0182664692401886,
0.009607372805476189,
-0.06576306372880936,
0.014707177877426147,
-0.04681951925158501,
-0.003162689972668886,
-0.05621938407421112,
-0.13599282503128052,
-0.05819709599018097,
-0.06023237854242325,
0.16219113767147064,
-0.16089646518230438,
0.010546579025685787,
0.11612687259912491,
0.05666930601000786,
0.013072419911623001,
-0.05585996061563492,
-0.0005406904965639114,
0.020808519795536995,
-0.011414050124585629,
-0.032221727073192596,
-0.004772410728037357,
0.016951968893408775,
-0.075032539665699,
-0.011593661271035671,
-0.13091319799423218,
-0.0522497184574604,
0.07160711288452148,
0.08711067587137222,
-0.10821536928415298,
-0.04707513749599457,
-0.04032978042960167,
0.004289436154067516,
-0.12235821783542633,
-0.07897678017616272,
0.1642005294561386,
-0.03268561512231827,
0.051709987223148346,
-0.06279080361127853,
-0.01879849098622799,
0.03506944701075554,
0.02389792911708355,
-0.011159556917846203,
0.10379714518785477,
-0.031519293785095215,
-0.06392616033554077,
0.029275136068463326,
0.014020157046616077,
0.06808053702116013,
0.0698455199599266,
-0.005877681542187929,
-0.0832696482539177,
-0.041684892028570175,
-0.005243266467005014,
-0.020509619265794754,
0.09827437996864319,
-0.06795480102300644,
-0.033209800720214844,
0.054026320576667786,
0.03622535988688469,
-0.003642131807282567,
-0.09003069996833801,
0.033819057047367096,
0.0718264952301979,
-0.03347960114479065,
-0.07713913917541504,
-0.052936721593141556,
-0.02334134466946125,
0.09831319004297256,
0.011006509885191917,
-0.0009191703866235912,
-0.036657944321632385,
-0.04333275556564331,
-0.11639481037855148,
0.17196184396743774,
-0.015025215223431587,
-0.18119840323925018,
-0.09074176102876663,
-0.013332195580005646,
0.02962443232536316,
-0.024168644100427628,
0.0329810194671154,
-0.0664343312382698,
-0.07399126887321472,
-0.15095311403274536,
0.10425765812397003,
0.04863886907696724,
-0.04906444624066353,
-0.02132437378168106,
-0.08580834418535233,
0.033752113580703735,
-0.09480223059654236,
0.016712630167603493,
-0.029088424518704414,
-0.05510701611638069,
-0.04430294409394264,
0.04559631645679474,
0.10585527122020721,
0.11876830458641052,
-0.019328434020280838,
-0.002889283699914813,
-0.055544160306453705,
0.21496818959712982,
-0.07324036210775375,
0.07379099726676941,
0.08895391970872879,
0.006476993206888437,
0.043852195143699646,
0.12960344552993774,
0.04651841148734093,
-0.057646773755550385,
0.020487530156970024,
0.07008916884660721,
-0.06835778057575226,
-0.1804676502943039,
-0.07012664526700974,
-0.04812924191355705,
-0.040965911000967026,
0.015094563364982605,
0.018586087971925735,
0.01476640347391367,
0.01576473005115986,
-0.012820398434996605,
-0.06178464740514755,
0.021981919184327126,
0.05660382658243179,
0.0440039224922657,
0.0022627562284469604,
0.06858816742897034,
-0.0330992229282856,
0.0191530492156744,
0.13596969842910767,
-0.09558484703302383,
0.25753432512283325,
-0.009196452796459198,
0.18012936413288116,
0.048969730734825134,
0.07520285248756409,
0.005518894176930189,
0.022099345922470093,
-0.06813857704401016,
0.023000067099928856,
-0.04877513274550438,
-0.024525152519345284,
0.02156863361597061,
0.07356763631105423,
0.04166831448674202,
0.040243130177259445,
-0.14065052568912506,
-0.03927500173449516,
0.025688322260975838,
0.11957171559333801,
0.06273934990167618,
-0.12807059288024902,
-0.09022713452577591,
0.035182517021894455,
-0.08822924643754959,
-0.029798336327075958,
0.03777261823415756,
0.08857560157775879,
-0.10101114958524704,
0.0035724621266126633,
-0.01937514916062355,
0.09524481743574142,
0.02679239585995674,
-0.011682150885462761,
-0.012015440501272678,
0.056671276688575745,
-0.00864988099783659,
0.10703225433826447,
-0.15054741501808167,
0.17425312101840973,
0.016106968745589256,
0.09693068265914917,
-0.05109332501888275,
0.007399084512144327,
0.034319471567869186,
-0.00023148494074121118,
0.1880052536725998,
-0.0013269579503685236,
0.07457331568002701,
-0.08753619343042374,
-0.09396708756685257,
-0.005438582040369511,
0.0764414519071579,
-0.06210795044898987,
0.1049938052892685,
0.009730824269354343,
0.005078629124909639,
0.0004241274727974087,
0.02324112318456173,
-0.10267770290374756,
-0.16527152061462402,
-0.0046156602911651134,
-0.12013152241706848,
0.033339567482471466,
-0.05139998719096184,
-0.04575246572494507,
-0.0412122942507267,
0.10986291617155075,
-0.1104242131114006,
-0.005055866204202175,
-0.12185898423194885,
0.11187443137168884,
0.07721985876560211,
-0.07575412839651108,
0.002616679295897484,
0.030109217390418053,
0.1299431324005127,
0.027232307940721512,
-0.02094976417720318,
0.004936393350362778,
-0.06340187788009644,
-0.11755052953958511,
-0.030401231721043587,
0.12737132608890533,
0.10852864384651184,
0.11104410886764526,
0.006937114987522364,
0.01569378189742565,
0.030238013714551926,
-0.1189366951584816,
-0.040660418570041656,
0.04318013787269592,
0.05770283192396164,
-0.0027944683097302914,
-0.014334076084196568,
0.019730016589164734,
-0.12780241668224335,
-0.02770872227847576,
0.08861525356769562,
0.17047759890556335,
-0.1206909790635109,
0.09743478894233704,
0.15322749316692352,
-0.08397035300731659,
-0.2282513529062271,
0.028007300570607185,
-0.0007818995509296656,
0.07311289757490158,
0.047456372529268265,
-0.13394306600093842,
0.05837112292647362,
0.02447875589132309,
-0.014529062435030937,
-0.06901754438877106,
-0.23948749899864197,
-0.12399517744779587,
0.09085532277822495,
0.00047430387348867953,
-0.08991140872240067,
-0.12420188635587692,
-0.06317459791898727,
0.07202423363924026,
-0.01620648056268692,
0.14872674643993378,
-0.15264387428760529,
0.030755015090107918,
0.05342845246195793,
0.06016855686903,
0.02828146144747734,
-0.036219824105501175,
0.11719400435686111,
0.038331303745508194,
0.023652972653508186,
-0.02012849785387516,
-0.027253596112132072,
0.08447722345590591,
-0.07145079225301743,
0.11409338563680649,
0.05081571638584137,
0.0390644408762455,
-0.18529202044010162,
-0.053695276379585266,
-0.10297073423862457,
0.021555006504058838,
-0.04532354697585106,
-0.034554239362478256,
-0.12688696384429932,
0.14310534298419952,
0.11093248426914215,
-0.02435566857457161,
-0.06583022326231003,
-0.08049221336841583,
-0.04056679829955101,
0.19751138985157013,
0.2126133143901825,
0.009201533161103725,
-0.052046194672584534,
0.060963764786720276,
-0.028966611251235008,
0.04568898305296898,
-0.17719990015029907,
0.006449130829423666,
0.08611352741718292,
0.031199784949421883,
0.13964276015758514,
-0.01447341963648796,
-0.0910668671131134,
0.014763451181352139,
0.017868751659989357,
-0.050338562577962875,
-0.2354230284690857,
-0.02762523479759693,
0.031909774988889694,
-0.1728050857782364,
-0.11963817477226257,
0.059285227209329605,
-0.08865649998188019,
-0.01834162324666977,
-0.004910048097372055,
0.03174475207924843,
-0.028339946642518044,
0.09035948663949966,
0.020082322880625725,
0.038669098168611526,
-0.08669020980596542,
0.01735553704202175,
0.09368210285902023,
-0.17851507663726807,
0.02733517624437809,
0.04227530211210251,
-0.0658162534236908,
-0.04782208055257797,
-0.06161864474415779,
0.16432948410511017,
-0.08360818773508072,
-0.02750220149755478,
0.019748244434595108,
-0.13326396048069,
0.03267528861761093,
0.12753768265247345,
0.03375300392508507,
0.00021100591402500868,
-0.06624989956617355,
0.03605864942073822,
-0.10541829466819763,
0.07897721230983734,
0.12253521382808685,
0.006723766215145588,
-0.02091570757329464,
0.09894797205924988,
0.007608894258737564,
-0.00025600340450182557,
-0.04622165858745575,
-0.004087408538907766,
-0.06933917105197906,
-0.047038037329912186,
-0.11229230463504791,
0.05122876167297363,
-0.046879831701517105,
0.007739280816167593,
-0.024353915825486183,
-0.03769673407077789,
-0.04493774101138115,
0.00447125593200326,
-0.03797905519604683,
-0.02874690853059292,
-0.0156367439776659,
0.09010133892297745,
-0.10580015927553177,
0.009136137552559376,
0.023076845332980156,
-0.04353804141283035,
0.03876516595482826,
-0.022855384275317192,
0.04305290803313255,
0.040575314313173294,
-0.05546855553984642,
0.05123310163617134,
-0.012355833314359188,
0.009667029604315758,
0.03567584976553917,
-0.1319981813430786,
0.02196536585688591,
-0.016023043543100357,
-0.032784122973680496,
-0.02308635599911213,
-0.015058192424476147,
-0.07203695178031921,
0.022192150354385376,
-0.0024042148143053055,
-0.05737275630235672,
-0.06268597394227982,
0.0679222121834755,
0.07153060287237167,
0.029355039820075035,
0.14209550619125366,
-0.04992862418293953,
0.017624476924538612,
-0.18612603843212128,
-0.03011426143348217,
0.004320261999964714,
0.07814221829175949,
-0.112697534263134,
-0.07783816009759903,
0.06187773495912552,
-0.012134304270148277,
0.19725853204727173,
0.011715739034116268,
0.022038981318473816,
0.06490058451890945,
-0.05760833993554115,
0.07450564205646515,
0.008144011721014977,
0.07408696413040161,
0.0060836258344352245,
-0.031199557706713676,
0.0009685024851933122,
-0.018874865025281906,
-0.03159353509545326,
0.05141883343458176,
0.16931696236133575,
0.14443260431289673,
0.09822744876146317,
0.05303283780813217,
-0.021812977269291878,
-0.04326687380671501,
-0.07455859333276749,
0.05006607994437218,
-0.06092412397265434,
0.0029537552036345005,
-0.050348035991191864,
0.1680566817522049,
0.11412188410758972,
-0.18106436729431152,
0.14717182517051697,
-0.01648268848657608,
-0.11019930988550186,
-0.052126117050647736,
-0.11646215617656708,
-0.059852369129657745,
-0.04315490648150444,
-0.003455676371231675,
-0.13484638929367065,
0.07361117750406265,
0.07380739599466324,
-0.0029467183630913496,
-0.011815307661890984,
0.11738063395023346,
-0.1571902632713318,
-0.010458906181156635,
0.07984591275453568,
0.023548629134893417,
0.013804690912365913,
0.0028833348769694567,
0.018500836566090584,
0.015176094137132168,
0.025837035849690437,
0.058422401547431946,
0.03742982819676399,
0.03827984258532524,
-0.0034394641406834126,
-0.0020028280559927225,
-0.07807698100805283,
-0.005658488254994154,
-0.012249571271240711,
0.056088343262672424,
0.20189093053340912,
0.019459323957562447,
-0.007381243631243706,
-0.02436535246670246,
0.1569952815771103,
-0.07694745063781738,
0.02306724339723587,
-0.1504795253276825,
0.21831925213336945,
0.005902466364204884,
0.06193381920456886,
0.022249171510338783,
-0.11479225009679794,
0.0227194931358099,
0.2292703241109848,
0.14849455654621124,
-0.03947043791413307,
-0.040679510682821274,
0.026434438303112984,
0.012792383320629597,
-0.006111650262027979,
0.11711415648460388,
0.01904897950589657,
0.22946500778198242,
-0.09261956065893173,
0.07431340217590332,
-0.05308964475989342,
-0.07654806971549988,
-0.010051565244793892,
0.18343473970890045,
0.01959119364619255,
0.0030662454664707184,
-0.08251958340406418,
0.04667745530605316,
-0.010255261324346066,
-0.27109500765800476,
0.07033957540988922,
-0.08637131750583649,
-0.06547725945711136,
0.0029989134054630995,
-0.05578155443072319,
0.05287696793675423,
0.0612434446811676,
-0.01225101388990879,
0.04192115366458893,
0.12032030522823334,
0.03582160919904709,
-0.0874042734503746,
-0.050584349781274796,
0.09243100136518478,
-0.01434134691953659,
0.1817537099123001,
0.02644343301653862,
0.06889451295137405,
0.11640039086341858,
-0.029873181134462357,
-0.10313668102025986,
0.03791031613945961,
0.06638200581073761,
-0.032992515712976456,
0.01231817714869976,
0.1649765968322754,
0.003467707661911845,
0.0994783565402031,
0.040806230157613754,
-0.13571494817733765,
0.05694858729839325,
-0.08581993728876114,
-0.07127045840024948,
-0.06760400533676147,
0.06604219973087311,
-0.06885319948196411,
0.15048469603061676,
0.16551606357097626,
-0.023929137736558914,
0.012159937992691994,
-0.04572589695453644,
0.04290936514735222,
0.016900165006518364,
0.1900377869606018,
0.010949833318591118,
-0.11792130768299103,
-0.011709251441061497,
0.04894031956791878,
0.04670681431889534,
-0.33254602551460266,
-0.05238974466919899,
-0.015727413818240166,
0.033142317086458206,
-0.019345374777913094,
0.12274164706468582,
0.04090122506022453,
0.06549014896154404,
-0.04056638479232788,
-0.17633120715618134,
0.04090563952922821,
0.11785619705915451,
-0.0896400436758995,
-0.022235892713069916
] |
null | null |
transformers
|
**FinancialBERT** is a BERT model pre-trained on a large corpus of financial texts. Its purpose is to advance financial NLP research and practice, so that financial practitioners and researchers can benefit from it without needing the significant computational resources required to train such a model from scratch.
The model was trained on a large corpus of financial texts:
- *TRC2-financial*: 1.8M news articles that were published by Reuters between 2008 and 2010.
- *Bloomberg News*: 400,000 articles between 2006 and 2013.
- *Corporate Reports*: 192,000 transcripts (10-K & 10-Q)
- *Earning Calls*: 42,156 documents.
More details on `FinancialBERT` can be found at: https://www.researchgate.net/publication/358284785_FinancialBERT_-_A_Pretrained_Language_Model_for_Financial_Text_Mining
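As a hedged usage sketch (not part of the original card), the model can be queried through the Transformers fill-mask pipeline:
```python
from transformers import pipeline

fill_mask = pipeline("fill-mask", model="ahmedrachid/FinancialBERT")

text = "Tesla remains one of the highest [MASK] stocks on the market."
for prediction in fill_mask(text):
    print(f"{prediction['token_str']:>12}  {prediction['score']:.4f}")
```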
> Created by [Ahmed Rachid Hazourli](https://www.linkedin.com/in/ahmed-rachid/)
|
{"language": "en", "tags": ["fill-mask"], "widget": [{"text": "Tesla remains one of the highest [MASK] stocks on the market. Meanwhile, Aurora Innovation is a pre-revenue upstart that shows promise."}, {"text": "Asian stocks [MASK] from a one-year low on Wednesday as U.S. share futures and oil recovered from the previous day's selloff, but uncertainty over the impact of the Omicron"}, {"text": "U.S. stocks were set to rise on Monday, led by [MASK] in Apple which neared $3 trillion in market capitalization, while investors braced for a Federal Reserve meeting later this week."}]}
|
fill-mask
|
ahmedrachid/FinancialBERT
|
[
"transformers",
"pytorch",
"bert",
"fill-mask",
"en",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bert #fill-mask #en #autotrain_compatible #endpoints_compatible #has_space #region-us
|
FinancialBERT is a BERT model pre-trained on a large corpora of financial texts. The purpose is to enhance financial NLP research and practice in financial domain, hoping that financial practitioners and researchers can benefit from it without the necessity of the significant computational resources required to train the model.
The model was trained on a large corpus of financial texts:
- *TRC2-financial*: 1.8M news articles that were published by Reuters between 2008 and 2010.
- *Bloomberg News*: 400,000 articles between 2006 and 2013.
- *Corporate Reports*: 192,000 transcripts (10-K & 10-Q)
- *Earning Calls*: 42,156 documents.
More details on 'FinancialBERT' can be found at: URL
> Created by Ahmed Rachid Hazourli
|
[] |
[
"TAGS\n#transformers #pytorch #bert #fill-mask #en #autotrain_compatible #endpoints_compatible #has_space #region-us \n"
] |
[
42
] |
[
"passage: TAGS\n#transformers #pytorch #bert #fill-mask #en #autotrain_compatible #endpoints_compatible #has_space #region-us \n"
] |
[
-0.019997192546725273,
0.014780418016016483,
-0.0068318662233650684,
0.023798983544111252,
0.08987267315387726,
0.02389785833656788,
0.07383517920970917,
0.09235703200101852,
0.07816500961780548,
0.03791143000125885,
0.17405477166175842,
0.13409073650836945,
-0.051454730331897736,
0.16234464943408966,
-0.04139497131109238,
-0.2700372636318207,
0.07122375071048737,
0.04123169928789139,
-0.09888456016778946,
0.10383296757936478,
0.06155335530638695,
-0.10843710601329803,
0.07045353949069977,
-0.011998448520898819,
-0.1022854670882225,
0.05032598599791527,
0.02489493228495121,
-0.09622540324926376,
0.13407501578330994,
0.011810209602117538,
0.2194730043411255,
0.025207435712218285,
-0.06130606308579445,
-0.060049161314964294,
0.04430820792913437,
0.01434273086488247,
-0.07512759417295456,
0.058154039084911346,
-0.010311419144272804,
-0.057596076279878616,
-0.005639648996293545,
0.0051578315906226635,
0.02132921852171421,
0.016043510288000107,
-0.15499195456504822,
-0.14057983458042145,
-0.023597275838255882,
0.016603834927082062,
-0.00811913050711155,
0.052211690694093704,
0.0036289459094405174,
0.20500120520591736,
-0.14436838030815125,
0.0768473744392395,
0.2050132155418396,
-0.32808512449264526,
-0.00574528006836772,
0.13807356357574463,
0.12416622042655945,
-0.03212013840675354,
-0.0475553534924984,
0.08011359721422195,
0.030726313591003418,
0.01681010238826275,
0.0950244814157486,
-0.06115942448377609,
-0.04597238823771477,
0.04647267237305641,
-0.10661830008029938,
-0.0665086954832077,
0.17132075130939484,
-0.05603498965501785,
0.06844817847013474,
-0.01189623586833477,
-0.13061924278736115,
-0.08083189278841019,
0.012292750179767609,
-0.004360265098512173,
-0.011134655214846134,
0.030042581260204315,
-0.023343408480286598,
-0.03607942909002304,
-0.14559675753116608,
0.037346016615629196,
-0.22716374695301056,
0.22807790338993073,
-0.0021595561411231756,
0.06564522534608841,
-0.1609334945678711,
0.06621173024177551,
-0.04811084270477295,
-0.1310233175754547,
0.08042474091053009,
-0.0983743742108345,
0.026545649394392967,
0.014652957208454609,
-0.09899450093507767,
-0.015399972908198833,
0.05277616158127785,
0.15310360491275787,
0.038486067205667496,
0.005427191965281963,
0.0576895996928215,
0.11484527587890625,
0.0468781404197216,
0.09522499889135361,
-0.022713957354426384,
-0.04379211366176605,
0.010413505136966705,
-0.073536217212677,
0.031225204467773438,
-0.06791940331459045,
-0.14752791821956635,
-0.06818811595439911,
0.0213534627109766,
0.043174270540475845,
0.06837717443704605,
0.0556020513176918,
-0.0658470168709755,
0.029443323612213135,
0.07160183787345886,
-0.05716608464717865,
0.035650584846735,
-0.02408956177532673,
0.05407790467143059,
0.06689951568841934,
0.035270705819129944,
-0.011631379835307598,
0.02675902284681797,
0.10464239865541458,
-0.09729970991611481,
-0.02498665638267994,
-0.07309185713529587,
-0.11168652027845383,
0.039843566715717316,
-0.126163050532341,
0.031249333173036575,
-0.18737362325191498,
-0.050580885261297226,
0.044490206986665726,
0.06373832374811172,
-0.012622763402760029,
-0.040059540420770645,
0.07929396629333496,
-0.04004373773932457,
0.05642369017004967,
-0.04999612271785736,
-0.01643742248415947,
-0.04351089894771576,
0.09297361224889755,
-0.024169040843844414,
0.16232319176197052,
-0.13296359777450562,
0.05146636813879013,
-0.07414641231298447,
0.02616247721016407,
-0.15671250224113464,
-0.06691022217273712,
-0.029202422127127647,
0.11290900409221649,
0.01985234022140503,
-0.055902957916259766,
-0.14365024864673615,
0.05384698510169983,
0.002830381039530039,
0.13930785655975342,
-0.103221096098423,
-0.08584429323673248,
0.1723223775625229,
-0.07459700852632523,
-0.12498592585325241,
0.08450481295585632,
-0.0021834061481058598,
0.005788642447441816,
-0.01612323336303234,
0.15655365586280823,
0.007788538932800293,
-0.1459207534790039,
0.02517043426632881,
0.12556684017181396,
-0.10784289240837097,
-0.07218190282583237,
0.04402026906609535,
0.01608199067413807,
-0.06122705340385437,
0.017061758786439896,
0.0761684849858284,
0.09253887832164764,
-0.06099804490804672,
-0.04992813616991043,
-0.03266993910074234,
-0.03985404595732689,
0.17489810287952423,
0.031515251845121384,
0.11334993690252304,
-0.0887557715177536,
-0.05869276449084282,
0.002472382504492998,
-0.000010590838428470306,
0.07978932559490204,
0.04706462472677231,
-0.04262077435851097,
0.173573300242424,
-0.08694329112768173,
-0.017911717295646667,
-0.17223328351974487,
-0.10613740235567093,
-0.035184405744075775,
0.06002962216734886,
-0.02354571595788002,
0.22939138114452362,
0.10045787692070007,
-0.052762627601623535,
-0.01266152411699295,
-0.022808140143752098,
0.0734381228685379,
0.045754801481962204,
-0.05912294611334801,
-0.08714418858289719,
-0.019147580489516258,
-0.09574199467897415,
-0.022937489673495293,
0.00461559509858489,
0.017668096348643303,
0.013645407743752003,
0.15327565371990204,
-0.013122829608619213,
0.05148979276418686,
-0.03711831942200661,
0.04457459598779678,
-0.05503220111131668,
0.005972165148705244,
0.05869760736823082,
-0.00018122517212759703,
-0.046011265367269516,
0.18646493554115295,
-0.17442137002944946,
0.3588862121105194,
0.2137124389410019,
-0.30202731490135193,
-0.030901100486516953,
0.05960994213819504,
-0.02790084108710289,
0.02056352235376835,
0.024805458262562752,
-0.04125044122338295,
-0.01781635917723179,
-0.03365306928753853,
0.12532514333724976,
-0.00743429409340024,
-0.023089420050382614,
0.021000880748033524,
-0.09153689444065094,
-0.08132359385490417,
0.01721842773258686,
0.06322828680276871,
-0.10036931186914444,
0.20869576930999756,
0.3527931272983551,
-0.03517407923936844,
0.15895430743694305,
0.04585598409175873,
-0.0032595298252999783,
-0.020002547651529312,
-0.07584541290998459,
-0.061042141169309616,
0.08585010468959808,
-0.1926221251487732,
-0.060722652822732925,
0.08632775396108627,
-0.024965791031718254,
0.05443504452705383,
-0.1242685317993164,
-0.03916269168257713,
0.04801696911454201,
0.08721378445625305,
-0.06858187913894653,
0.15527896583080292,
0.04643772542476654,
0.07740999758243561,
0.01485730241984129,
-0.08171246200799942,
0.07853212207555771,
0.016190508380532265,
-0.009880288504064083,
0.11189299821853638,
-0.12322121113538742,
-0.3270411193370819,
-0.1036125048995018,
-0.1356876641511917,
0.0236385315656662,
0.03963719308376312,
0.0848759263753891,
-0.06500120460987091,
-0.04776328429579735,
0.048077989369630814,
0.003034977475181222,
-0.07322894781827927,
0.0807197168469429,
-0.08422946184873581,
0.0028993047308176756,
-0.031046703457832336,
-0.06981796026229858,
-0.07935451716184616,
-0.021158605813980103,
-0.028699737042188644,
0.14996680617332458,
-0.03195967525243759,
0.0929282009601593,
0.13017547130584717,
-0.017366066575050354,
0.05216728150844574,
-0.001333492575213313,
0.21310119330883026,
-0.08228164911270142,
0.01297125592827797,
0.1641165316104889,
-0.010039757005870342,
0.08381732553243637,
0.1808161735534668,
0.02276216633617878,
-0.01400842610746622,
0.004599452018737793,
-0.0377776138484478,
-0.12526200711727142,
-0.09371183067560196,
-0.11544765532016754,
-0.13713443279266357,
-0.03001927025616169,
0.03838959336280823,
0.04998483881354332,
0.12899404764175415,
0.07106755673885345,
0.04336494207382202,
-0.020945880562067032,
-0.11677882075309753,
0.04133368656039238,
0.17263683676719666,
-0.0688924565911293,
0.15709428489208221,
-0.027079137042164803,
-0.142767995595932,
0.05419948697090149,
0.04750750958919525,
0.07328753918409348,
0.09328272193670273,
-0.042643431574106216,
0.04854756221175194,
0.18599742650985718,
0.1558474749326706,
0.12238230556249619,
0.03202788159251213,
-0.05972132831811905,
-0.019247906282544136,
-0.016303520649671555,
-0.017734156921505928,
0.06445907801389694,
0.19762155413627625,
-0.11132435500621796,
-0.0360291488468647,
-0.17527107894420624,
0.06210777908563614,
0.06503309309482574,
0.10352429002523422,
-0.19332195818424225,
0.011805955320596695,
0.07989324629306793,
0.008022811263799667,
-0.05122343450784683,
0.031791478395462036,
0.06403814256191254,
-0.105424664914608,
0.02268732152879238,
-0.005086842924356461,
0.07829988747835159,
0.09072943031787872,
0.09520526230335236,
-0.06981232762336731,
-0.11341451853513718,
0.020407982170581818,
0.05340219661593437,
-0.237494096159935,
0.2752111256122589,
-0.01675400696694851,
-0.10440664738416672,
-0.06639803200960159,
-0.014095853082835674,
0.06103066727519035,
0.11879439651966095,
0.08904329687356949,
0.03827634081244469,
-0.06885389983654022,
-0.14516481757164001,
0.013928813859820366,
-0.008963711559772491,
0.09685934334993362,
-0.03752905875444412,
0.009552331641316414,
-0.020827943459153175,
-0.03018965758383274,
0.014908617362380028,
0.22459544241428375,
0.005754288285970688,
-0.12589268386363983,
0.09304971247911453,
0.06522144377231598,
-0.046285245567560196,
-0.011271243914961815,
-0.08316995948553085,
-0.18322397768497467,
0.15367135405540466,
0.02645072154700756,
-0.023927437141537666,
-0.1048317551612854,
-0.09475836902856827,
0.12502896785736084,
-0.07658235728740692,
0.10046371072530746,
-0.07910627126693726,
0.01353660598397255,
-0.10440919548273087,
-0.14336179196834564,
0.16278621554374695,
-0.1094912514090538,
-0.013093649409711361,
-0.07687623798847198,
0.10533767938613892,
-0.11038915812969208,
0.06483782082796097,
-0.00986416358500719,
0.07901265472173691,
-0.15780720114707947,
-0.05598217621445656,
0.04586689919233322,
-0.07911389321088791,
0.06263264268636703,
0.025845585390925407,
-0.04219656065106392,
-0.011793752200901508,
0.07394790649414062,
0.049299415200948715,
0.23513655364513397,
0.23025807738304138,
-0.10544830560684204,
0.1366138905286789,
0.11136489361524582,
-0.01137661375105381,
-0.3609326481819153,
-0.09062621742486954,
-0.16480356454849243,
0.027790041640400887,
0.06418738514184952,
-0.07783430069684982,
0.056974146515131,
-0.038508735597133636,
-0.07596040517091751,
0.07943204045295715,
-0.16055847704410553,
-0.09214849770069122,
0.21518586575984955,
-0.042186107486486435,
0.4521917700767517,
-0.10656812787055969,
-0.030645860359072685,
-0.01665211282670498,
-0.12203021347522736,
0.04576216638088226,
-0.042045433074235916,
0.10051035135984421,
-0.022584471851587296,
0.08909247070550919,
0.04318235069513321,
-0.08929072320461273,
0.13088437914848328,
-0.08299088478088379,
0.01258410606533289,
-0.12291601300239563,
-0.13526736199855804,
0.10009243339300156,
-0.04157940670847893,
-0.019031938165426254,
-0.02512298710644245,
-0.002436131238937378,
-0.0716470405459404,
0.009744895622134209,
-0.1215086579322815,
0.1240008994936943,
0.006744388025254011,
-0.06943678110837936,
-0.016564011573791504,
0.004025585483759642,
0.006819569505751133,
0.0005916405352763832,
0.20583884418010712,
-0.026668958365917206,
0.1982712298631668,
0.13076886534690857,
-0.01518801599740982,
-0.13593414425849915,
-0.07102048397064209,
0.013657873496413231,
-0.0759863629937172,
0.10298828780651093,
-0.059104785323143005,
0.03914390876889229,
0.09357528388500214,
-0.026357809081673622,
0.0382515974342823,
0.12117663770914078,
0.006824060808867216,
-0.017438286915421486,
0.17119427025318146,
-0.22347323596477509,
-0.002105974592268467,
-0.02485521510243416,
-0.012474271468818188,
0.051724232733249664,
0.01653975620865822,
0.09372277557849884,
0.009481724351644516,
-0.024638501927256584,
0.0012095279525965452,
-0.01719045825302601,
-0.07337337732315063,
0.02532159723341465,
0.0763181820511818,
0.0640103667974472,
-0.09319755434989929,
-0.022116515785455704,
0.019402600824832916,
-0.18742866814136505,
-0.0049998946487903595,
0.06983502209186554,
-0.07257737219333649,
-0.1438433825969696,
-0.03517717868089676,
0.03386753797531128,
-0.06339820474386215,
-0.003573950147256255,
-0.03833897039294243,
-0.1015959233045578,
0.033729035407304764,
0.20060980319976807,
0.12331750988960266,
0.06271950900554657,
-0.007443425245583057,
-0.006798196118324995,
0.02133343555033207,
-0.0006412708316929638,
0.005519035272300243,
0.04805190488696098,
-0.09671364724636078,
0.05400523915886879,
-0.020883671939373016,
0.159361332654953,
-0.11516035348176956,
-0.03602749481797218,
-0.16608001291751862,
-0.0025005992501974106,
-0.06620870530605316,
-0.12793685495853424,
-0.09089316427707672,
-0.10265116393566132,
0.022762583568692207,
-0.093812495470047,
-0.05636368691921234,
-0.04407127946615219,
-0.13413715362548828,
0.005441281013190746,
0.018367331475019455,
0.02155439928174019,
-0.07125702500343323,
-0.043369393795728683,
0.1418784111738205,
-0.04337143525481224,
0.0683596283197403,
0.09818147867918015,
-0.06203332170844078,
0.054137710481882095,
-0.04970165342092514,
-0.14185525476932526,
0.0840473622083664,
0.01764846034348011,
0.10236053913831711,
0.05411945655941963,
-0.014528910629451275,
0.019403595477342606,
0.06873858720064163,
0.04191399738192558,
0.055410150438547134,
-0.08811602741479874,
0.0727192685008049,
0.017829444259405136,
-0.17344309389591217,
-0.002812223043292761,
-0.09228700399398804,
0.09573064744472504,
0.0038106467109173536,
0.08692013472318649,
-0.018838319927453995,
0.07669402658939362,
-0.06372105330228806,
0.032385438680648804,
-0.04787842184305191,
-0.15474893152713776,
0.022027676925063133,
-0.031797122210264206,
0.02222929336130619,
-0.01781092956662178,
0.25592875480651855,
0.04199562966823578,
-0.028444714844226837,
0.04539623484015465,
0.08636938035488129,
0.006372279487550259,
0.002514956519007683,
0.1312417834997177,
0.0799771249294281,
-0.06958220154047012,
-0.0805487185716629,
0.08878789097070694,
0.024096166715025902,
-0.022522514685988426,
0.10551164299249649,
0.0890565887093544,
0.09588615596294403,
0.10439319163560867,
0.0029458487406373024,
-0.003767570713534951,
-0.15333908796310425,
-0.20982478559017181,
-0.03524164482951164,
0.07446170598268509,
-0.03912423551082611,
-0.011536002159118652,
0.14743657410144806,
-0.017466602846980095,
0.06339583545923233,
-0.06103375554084778,
0.003024832811206579,
-0.1707538366317749,
-0.11110015213489532,
-0.06665298342704773,
-0.07638658583164215,
-0.01133937668055296,
-0.025547748431563377,
0.015600880607962608,
0.14057202637195587,
0.019721798598766327,
-0.006055000703781843,
0.15045739710330963,
-0.002882585860788822,
-0.00872653629630804,
-0.0069918944500386715,
0.010790313594043255,
0.04433319717645645,
-0.031996894627809525,
-0.007406067568808794,
-0.15730082988739014,
0.011805969290435314,
-0.05616869032382965,
-0.003391900798305869,
-0.09486696124076843,
-0.008379851467907429,
-0.08778379112482071,
-0.12449922412633896,
-0.0818711593747139,
0.012598068453371525,
-0.034598805010318756,
0.1099323257803917,
-0.015518623404204845,
0.05860991030931473,
-0.02492854930460453,
0.17339323461055756,
-0.11348263919353485,
-0.06562087684869766,
-0.03767455369234085,
0.14412148296833038,
0.02393786795437336,
0.08941729366779327,
-0.04761238768696785,
0.011728709563612938,
-0.14790870249271393,
0.2699657380580902,
0.3681162893772125,
-0.07610306888818741,
0.09933292120695114,
0.06586647778749466,
0.021928440779447556,
0.0534493513405323,
0.07365147024393082,
0.09011058509349823,
0.2447095513343811,
-0.11538466066122055,
-0.027343424037098885,
-0.0548795610666275,
-0.0258488766849041,
-0.10267343372106552,
0.01651790551841259,
0.0354560911655426,
-0.046818505972623825,
-0.06981255859136581,
0.03282694146037102,
-0.14478671550750732,
0.09901022166013718,
0.0633462518453598,
-0.256929486989975,
-0.05686559900641441,
0.018117044121026993,
0.21250036358833313,
-0.010101517662405968,
0.12501320242881775,
-0.045949649065732956,
-0.08358869701623917,
0.029424646869301796,
0.001018420560285449,
-0.16109754145145416,
-0.03556673601269722,
0.13814355432987213,
-0.01659751683473587,
0.08334828913211823,
-0.03534523397684097,
-0.0032180710695683956,
0.10341408103704453,
0.07460443675518036,
-0.029920494183897972,
0.018643151968717575,
0.04702908173203468,
-0.1253814548254013,
-0.09715913981199265,
0.022846678271889687,
0.0022769933566451073,
-0.10901212692260742,
0.055909570306539536,
-0.19673795998096466,
0.053856540471315384,
-0.07197987288236618,
-0.00637035770341754,
0.003932078834623098,
0.019976330921053886,
-0.031013404950499535,
0.06418923288583755,
0.06631232798099518,
0.006334143690764904,
-0.046848539263010025,
-0.02244441583752632,
-0.04560748487710953,
0.05041050538420677,
-0.08738179504871368,
-0.18448606133460999,
-0.061912667006254196,
-0.04870956763625145,
0.02761106938123703,
-0.013057345524430275,
-0.12331314384937286,
-0.0649430900812149,
-0.04519079998135567,
0.0410563200712204,
-0.10278905928134918,
0.03630625829100609,
0.07308628410100937,
0.029482077807188034,
0.010941985063254833,
-0.04218148812651634,
0.029453212395310402,
0.04804936796426773,
-0.15652084350585938,
-0.07838112115859985
] |
null | null |
transformers
|
# Bert2Bert Turkish Paraphrase Generation
# INISTA 2021
# Comparison of Turkish Paraphrase Generation Models
# Dataset
The dataset used for model training was created by combining a Turkish translation of the QQP (Quora Question Pairs) dataset with a manually generated dataset.
Dataset [Link](https://drive.google.com/file/d/1-2l9EwIzXZ7fUkNW1vdeF3lzQp2pygp_/view?usp=sharing)
# How To Use
```python
from transformers import BertTokenizerFast, EncoderDecoderModel

tokenizer = BertTokenizerFast.from_pretrained("dbmdz/bert-base-turkish-cased")
model = EncoderDecoderModel.from_pretrained("ahmetbagci/bert2bert-turkish-paraphrase-generation")

text = "son model arabalar çevreye daha mı az zarar veriyor?"
input_ids = tokenizer(text, return_tensors="pt").input_ids
output_ids = model.generate(input_ids)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))

# sample output:
# son model arabalar çevre için daha az zararlı mı?
```
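Beyond the single greedy paraphrase above, a hedged variation (not from the original card) is to return several candidate paraphrases with beam search; the generation settings below are illustrative assumptions.
```python
from transformers import BertTokenizerFast, EncoderDecoderModel

tokenizer = BertTokenizerFast.from_pretrained("dbmdz/bert-base-turkish-cased")
model = EncoderDecoderModel.from_pretrained(
    "ahmetbagci/bert2bert-turkish-paraphrase-generation")

text = "son model arabalar çevreye daha mı az zarar veriyor?"
input_ids = tokenizer(text, return_tensors="pt").input_ids

# Beam search with multiple returned sequences yields alternative paraphrases.
output_ids = model.generate(
    input_ids,
    num_beams=5,
    num_return_sequences=3,
    max_length=64,
    early_stopping=True,
)
for ids in output_ids:
    print(tokenizer.decode(ids, skip_special_tokens=True))
```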
# Cite
```bibtex
@INPROCEEDINGS{9548335,
author={Bağcı, Ahmet and Amasyali, Mehmet Fatih},
booktitle={2021 International Conference on INnovations in Intelligent SysTems and Applications (INISTA)},
title={Comparison of Turkish Paraphrase Generation Models},
year={2021},
volume={},
number={},
pages={1-6},
doi={10.1109/INISTA52262.2021.9548335}
}
```
|
{"language": ["tr"], "tags": ["paraphrasing", "encoder-decoder", "seq2seq", "bert"]}
|
text2text-generation
|
ahmetbagci/bert2bert-turkish-paraphrase-generation
|
[
"transformers",
"pytorch",
"encoder-decoder",
"text2text-generation",
"paraphrasing",
"seq2seq",
"bert",
"tr",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"tr"
] |
TAGS
#transformers #pytorch #encoder-decoder #text2text-generation #paraphrasing #seq2seq #bert #tr #autotrain_compatible #endpoints_compatible #region-us
|
#Bert2Bert Turkish Paraphrase Generation
#INISTA 2021
#Comparison of Turkish Paraphrase Generation Models
#Dataset
The dataset used in model training was created with the combination of the translation of the QQP dataset and manually generated dataset.
Dataset Link
#How To Use
#Cite
|
[] |
[
"TAGS\n#transformers #pytorch #encoder-decoder #text2text-generation #paraphrasing #seq2seq #bert #tr #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
56
] |
[
"passage: TAGS\n#transformers #pytorch #encoder-decoder #text2text-generation #paraphrasing #seq2seq #bert #tr #autotrain_compatible #endpoints_compatible #region-us \n"
] |
[
0.004845081362873316,
0.0002728491963353008,
-0.007828525267541409,
0.010680540464818478,
0.16071753203868866,
0.0039472877979278564,
0.06263165175914764,
0.11872876435518265,
-0.05338850989937782,
-0.021664535626769066,
0.1134541854262352,
0.21414808928966522,
-0.016569195315241814,
0.044592730700969696,
-0.06656809151172638,
-0.24469858407974243,
0.06288204342126846,
0.07128794491291046,
0.04685601219534874,
0.10898637771606445,
0.08771263062953949,
-0.05385265126824379,
0.1025058925151825,
-0.012906919233500957,
-0.06534802913665771,
0.08990804105997086,
0.028823617845773697,
-0.06843987107276917,
0.09469900280237198,
0.05451532080769539,
0.09188804775476456,
0.01437271200120449,
-0.017063375562429428,
-0.20767809450626373,
0.02524842880666256,
0.007994375191628933,
-0.07973784953355789,
0.010717292316257954,
0.07289894670248032,
-0.11262629181146622,
0.045062363147735596,
0.054069433361291885,
-0.022969309240579605,
0.05531572178006172,
-0.12484965473413467,
0.007543416693806648,
-0.007263740058988333,
-0.016239654272794724,
0.133840411901474,
0.09748712927103043,
-0.022411121055483818,
0.07196155190467834,
-0.11342044174671173,
0.1410277932882309,
0.14379897713661194,
-0.3031107187271118,
0.010154986754059792,
-0.05483492091298103,
0.11157624423503876,
0.06324539333581924,
0.0012445903848856688,
0.024224260821938515,
0.03490225970745087,
0.03553759306669235,
-0.019735442474484444,
-0.08653558790683746,
-0.1334179788827896,
0.015268017537891865,
-0.09485769271850586,
-0.0615389421582222,
0.20487163960933685,
-0.0520436130464077,
0.04991563782095909,
0.015587209723889828,
-0.12766095995903015,
-0.06084996461868286,
-0.03415517881512642,
0.013176390901207924,
-0.04661725088953972,
0.09454452246427536,
0.008492567576467991,
-0.08331812918186188,
-0.13350994884967804,
-0.0505450963973999,
-0.19630612432956696,
0.0960407555103302,
0.040494199842214584,
0.03448736295104027,
-0.20914360880851746,
0.051119692623615265,
0.04455132037401199,
-0.08841093629598618,
0.039510369300842285,
-0.0547054223716259,
0.04713301733136177,
0.02773696556687355,
-0.08335597813129425,
-0.11719959229230881,
0.06363441050052643,
0.18010759353637695,
-0.006362884771078825,
0.050839778035879135,
-0.01957261562347412,
0.0866309404373169,
-0.027472173795104027,
0.07572730630636215,
-0.013005423359572887,
0.011176411993801594,
0.018104057759046555,
-0.12247093766927719,
0.00976922083646059,
-0.05073453113436699,
-0.15330177545547485,
-0.04201026260852814,
0.0590456984937191,
0.05853689834475517,
0.010609609074890614,
0.05669379234313965,
-0.06575535237789154,
0.03256218880414963,
0.03366225212812424,
-0.06619859486818314,
0.014570088125765324,
0.004458408337086439,
0.06373099237680435,
0.04431414231657982,
0.025355080142617226,
0.05624958500266075,
-0.15065811574459076,
0.04616856947541237,
-0.05448470264673233,
0.020392779260873795,
-0.006906809750944376,
-0.04472748562693596,
0.01405956968665123,
-0.1316072791814804,
0.032024312764406204,
-0.1435723900794983,
-0.13091638684272766,
-0.0022578968200832605,
-0.02458648569881916,
0.009322873316705227,
-0.01689017564058304,
-0.06906986236572266,
-0.032817207276821136,
0.05972059816122055,
-0.07263859361410141,
-0.07813576608896255,
-0.05520780757069588,
0.08921582996845245,
-0.033874306827783585,
0.05248427391052246,
-0.1540571004152298,
0.0545266717672348,
-0.13221530616283417,
-0.006438986398279667,
-0.04581965506076813,
0.07597299665212631,
0.02684781886637211,
0.07677193731069565,
-0.035279735922813416,
-0.034818340092897415,
-0.12690693140029907,
0.06671007722616196,
0.01556343398988247,
0.20130684971809387,
-0.16016200184822083,
-0.09580651670694351,
0.2785685658454895,
-0.08607883751392365,
-0.1101909652352333,
0.12652680277824402,
0.02692471258342266,
-0.018006665632128716,
0.06496124714612961,
0.20681574940681458,
0.03627888485789299,
0.0042830645106732845,
0.0425102673470974,
0.161228746175766,
-0.08207247406244278,
-0.0849185436964035,
0.04781140759587288,
-0.05294511467218399,
-0.021426845341920853,
0.026271678507328033,
0.09242997318506241,
0.057539235800504684,
-0.044197481125593185,
-0.053296755999326706,
-0.029932653531432152,
0.007269647903740406,
0.0752902403473854,
0.003070630133152008,
0.08448686450719833,
-0.05973618105053902,
-0.030826477333903313,
0.05487152561545372,
-0.01982274278998375,
-0.04384920373558998,
0.08769723773002625,
-0.08217164874076843,
0.06658486276865005,
-0.0019089991692453623,
0.036552295088768005,
-0.18118254840373993,
-0.000029555707442341372,
-0.05756330490112305,
0.13930374383926392,
-0.009050858207046986,
0.050073228776454926,
0.05699343979358673,
-0.04643202945590019,
0.00539046386256814,
-0.030545871704816818,
0.11347368359565735,
0.0107181491330266,
-0.0974782183766365,
-0.03376417234539986,
0.0507480762898922,
-0.042238298803567886,
-0.013321234844624996,
0.02301943115890026,
0.041088636964559555,
0.053858738392591476,
0.1423664689064026,
-0.009652686305344105,
0.019579824060201645,
-0.03413048014044762,
0.06393551081418991,
-0.06755086034536362,
0.04864683374762535,
0.09276897460222244,
0.0013613988412544131,
-0.08906413614749908,
0.20791977643966675,
-0.14701223373413086,
0.17909643054008484,
0.1700255423784256,
-0.2922540605068207,
0.03553265333175659,
0.006000861059874296,
-0.026919497177004814,
0.020151330158114433,
0.047735795378685,
-0.07223079353570938,
0.16197308897972107,
0.0016102083027362823,
0.20199686288833618,
-0.06337828934192657,
-0.049662996083498,
-0.011365771293640137,
-0.1059560775756836,
-0.022237006574869156,
0.10119616240262985,
0.08618547022342682,
-0.10932020843029022,
0.15955230593681335,
0.21906130015850067,
-0.0015114833367988467,
0.15171851217746735,
-0.019976651296019554,
-0.03767221048474312,
0.06880281120538712,
0.006887360475957394,
-0.045248258858919144,
-0.1143995150923729,
-0.20749381184577942,
-0.024023691192269325,
0.030322138220071793,
0.039903681725263596,
0.10870398581027985,
-0.12409032136201859,
-0.02085990458726883,
0.00890937726944685,
-0.025983814150094986,
-0.041871245950460434,
0.054262373596429825,
0.04900028929114342,
0.09247048199176788,
-0.021312957629561424,
-0.0428536981344223,
0.09515873342752457,
0.009317893534898758,
-0.12360928952693939,
0.18976560235023499,
-0.17723055183887482,
-0.26349782943725586,
-0.20493856072425842,
-0.08565624803304672,
-0.013715104199945927,
0.06595149636268616,
0.16881190240383148,
-0.11056781560182571,
-0.007353850174695253,
-0.024011170491576195,
0.00006276468775467947,
-0.058590129017829895,
0.030522894114255905,
-0.021338827908039093,
0.019587397575378418,
-0.019215131178498268,
-0.05657161772251129,
-0.044816307723522186,
-0.0009115244029089808,
-0.06638089567422867,
0.09355100989341736,
-0.13377170264720917,
0.11364927887916565,
0.14344526827335358,
0.015565824694931507,
0.041849445551633835,
-0.024656541645526886,
0.22077099978923798,
-0.05025288835167885,
-0.048158541321754456,
0.22733135521411896,
-0.11327419430017471,
0.09031419456005096,
0.10333830863237381,
-0.017216317355632782,
-0.055831193923950195,
0.020092420279979706,
-0.005704396869987249,
-0.08630374819040298,
-0.20368331670761108,
-0.09038812667131424,
-0.09913022071123123,
0.08070100098848343,
0.0018782188417389989,
0.056442081928253174,
0.04211294278502464,
0.06266893446445465,
-0.023020049557089806,
-0.031120598316192627,
0.030311772599816322,
0.060645293444395065,
0.1862955391407013,
-0.030792491510510445,
0.13414587080478668,
-0.04735132306814194,
-0.13997696340084076,
0.07394035905599594,
0.08092352002859116,
0.009554372169077396,
0.06739094853401184,
0.055529363453388214,
0.024772990494966507,
0.048190221190452576,
0.08114925026893616,
0.11207245290279388,
-0.025968877598643303,
0.007442468777298927,
-0.017376840114593506,
0.005651891231536865,
-0.06316079199314117,
0.041323717683553696,
0.031457483768463135,
-0.13697461783885956,
-0.09628698229789734,
0.005394719075411558,
0.11061844229698181,
0.14363114535808563,
0.08028580993413925,
-0.18250198662281036,
0.011639295145869255,
0.09355757385492325,
-0.04384797066450119,
-0.08311349898576736,
0.1027570590376854,
0.03747688606381416,
-0.07409238815307617,
0.04939752444624901,
0.006847855634987354,
0.1125548705458641,
-0.03943830356001854,
0.11748991161584854,
-0.03501161187887192,
-0.1180211752653122,
0.048893097788095474,
0.09509912878274918,
-0.2621147334575653,
0.15918035805225372,
0.010385052300989628,
-0.03523145988583565,
-0.05776597559452057,
0.00810237880796194,
-0.030392702668905258,
0.10818704217672348,
0.17721658945083618,
0.004940577782690525,
-0.043279655277729034,
-0.15712031722068787,
-0.017658662050962448,
0.021778671070933342,
0.1717696636915207,
-0.025880569592118263,
0.007893223315477371,
-0.07135223597288132,
0.0019023120403289795,
-0.021775655448436737,
-0.05029372498393059,
0.03172405809164047,
-0.172221377491951,
0.04511832818388939,
0.04574202746152878,
0.05580786243081093,
0.006354114972054958,
0.004508171696215868,
-0.07022908329963684,
0.17534133791923523,
-0.16703204810619354,
-0.09688938409090042,
-0.09556786715984344,
-0.12097139656543732,
0.03340103477239609,
-0.10381569713354111,
0.05709338188171387,
-0.03751685470342636,
-0.0052117411978542805,
-0.057017732411623,
-0.1850176900625229,
0.1024027168750763,
-0.08580627292394638,
-0.03681233152747154,
-0.021999498829245567,
0.17754706740379333,
-0.068831667304039,
0.019069813191890717,
0.05174160748720169,
0.01318694930523634,
-0.08803199976682663,
-0.07589277625083923,
-0.02301132306456566,
-0.03330245241522789,
0.0875253900885582,
0.043610114604234695,
-0.09760557115077972,
0.021436329931020737,
-0.013868353329598904,
-0.006453867070376873,
0.3028523623943329,
0.16622884571552277,
-0.04453949257731438,
0.12459202855825424,
0.1280694454908371,
-0.0855848491191864,
-0.3617960214614868,
-0.17553606629371643,
-0.07604910433292389,
-0.03173697367310524,
-0.009704782627522945,
-0.13393063843250275,
0.09137072414159775,
0.022549638524651527,
-0.003524947911500931,
-0.06139049306511879,
-0.25616851449012756,
-0.0934155285358429,
0.17247842252254486,
-0.0033609054517000914,
0.2987949252128601,
-0.1238507553935051,
-0.10225887596607208,
-0.07689310610294342,
-0.1517038643360138,
0.08001170307397842,
-0.08184065669775009,
0.07470013201236725,
0.0061724442057311535,
0.042427919805049896,
0.034566592425107956,
-0.02901797741651535,
0.06943128257989883,
0.027249569073319435,
-0.002321473089978099,
-0.0793585553765297,
-0.021955354139208794,
-0.00226952345110476,
0.00881360936909914,
0.016383133828639984,
-0.09285938739776611,
0.04900795966386795,
-0.16589437425136566,
-0.018517563119530678,
-0.08545958250761032,
0.03763395920395851,
0.02893701009452343,
-0.032863687723875046,
0.016923103481531143,
-0.09276173263788223,
0.03969782963395119,
0.013706323690712452,
0.22845342755317688,
-0.08281946182250977,
0.11513824015855789,
0.16028407216072083,
0.1745736002922058,
-0.13817501068115234,
0.04411815106868744,
-0.03438113257288933,
-0.07499592751264572,
0.03534238785505295,
-0.007365957833826542,
0.1076551228761673,
0.10979242622852325,
-0.04340844973921776,
0.07006490975618362,
0.12300717085599899,
0.050982244312763214,
0.01728571578860283,
0.14610151946544647,
-0.20604033768177032,
-0.06160685047507286,
-0.0843583419919014,
-0.05962710827589035,
0.03411350026726723,
0.09679090976715088,
0.18228323757648468,
0.05838910862803459,
-0.03528478369116783,
-0.02219984494149685,
0.026124870404601097,
-0.074155792593956,
0.07647480070590973,
0.03645804896950722,
0.011481649242341518,
-0.12456534802913666,
0.049823686480522156,
-0.02195066586136818,
-0.10576183348894119,
0.04617385193705559,
0.1396186500787735,
-0.1140972450375557,
-0.11539649963378906,
-0.05134638771414757,
0.1324792206287384,
-0.16131268441677094,
-0.0532449372112751,
-0.10866648703813553,
-0.09755245596170425,
0.06694347411394119,
0.1934119015932083,
0.07861163467168808,
0.08768338710069656,
-0.0273879524320364,
-0.020480824634432793,
-0.009826128371059895,
0.01234008651226759,
0.025246931239962578,
0.014810888096690178,
-0.07990819960832596,
0.07269323617219925,
-0.05891767889261246,
0.17092666029930115,
-0.07806810736656189,
-0.01503763161599636,
-0.1451093852519989,
0.0549796000123024,
-0.13492430746555328,
-0.05633359029889107,
-0.09448296576738358,
-0.05187658220529556,
0.01910140924155712,
-0.01596044935286045,
-0.04528241604566574,
-0.04096253961324692,
-0.09480970352888107,
-0.007061055861413479,
-0.041727013885974884,
0.033036474138498306,
-0.1086711511015892,
0.019552119076251984,
0.10137558728456497,
-0.04316801205277443,
0.08796905726194382,
0.12070361524820328,
-0.07260937988758087,
0.1156395897269249,
-0.08972886949777603,
-0.10124751925468445,
0.12571844458580017,
0.017626050859689713,
0.012262154370546341,
0.17328138649463654,
0.007071465253829956,
0.08877817541360855,
-0.0004373614501673728,
0.02645450085401535,
0.1193930134177208,
-0.08956964313983917,
0.07401727139949799,
-0.04314511641860008,
-0.1223457083106041,
-0.06958146393299103,
-0.02253117971122265,
0.07935694605112076,
0.060577407479286194,
0.15098971128463745,
-0.06530127674341202,
0.0857890397310257,
-0.038088489323854446,
0.040122948586940765,
0.03817689046263695,
-0.13946357369422913,
-0.03992791101336479,
-0.11437313258647919,
0.016100646927952766,
-0.009270058013498783,
0.17478744685649872,
0.0036741504445672035,
0.0920478031039238,
0.03904896229505539,
0.06191297993063927,
0.013702518306672573,
0.034789808094501495,
0.20086100697517395,
0.08585512638092041,
-0.07768278568983078,
-0.11732643097639084,
0.05183850973844528,
0.02103358320891857,
0.015726614743471146,
0.09394960850477219,
0.061700742691755295,
-0.02006184309720993,
0.12472162395715714,
-0.006797176785767078,
0.052109915763139725,
-0.15330709517002106,
-0.17748017609119415,
-0.017934568226337433,
0.0429917611181736,
0.0012460291618481278,
0.1098814457654953,
0.16454724967479706,
-0.02584347501397133,
0.018722696229815483,
0.007262864615768194,
-0.053192250430583954,
-0.22037261724472046,
-0.06343018263578415,
-0.09372175484895706,
-0.09826881438493729,
0.010141788050532341,
-0.10602886229753494,
0.023597747087478638,
0.04270908981561661,
0.028305742889642715,
-0.027956346049904823,
0.1092296913266182,
0.02565075270831585,
-0.08157332986593246,
0.011348811909556389,
-0.04890153184533119,
0.08117170631885529,
0.03432215750217438,
-0.05511714145541191,
-0.02551085688173771,
-0.004077146761119366,
0.006130958441644907,
0.03619496896862984,
-0.06750660389661789,
0.004363180138170719,
-0.1537816822528839,
-0.09963921457529068,
-0.04402518272399902,
0.07204726338386536,
-0.043879054486751556,
0.11826331913471222,
0.03945372626185417,
-0.02197188511490822,
0.03665849193930626,
0.16134540736675262,
-0.09373094886541367,
-0.07082531601190567,
-0.061131540685892105,
0.20321504771709442,
0.10497711598873138,
0.1471145898103714,
-0.013286313973367214,
-0.05159538611769676,
-0.09259537607431412,
0.3223220705986023,
0.19066599011421204,
-0.10779967159032822,
0.04194958508014679,
0.03885580599308014,
0.055682819336652756,
0.13212546706199646,
0.07176600396633148,
0.10774799436330795,
0.24198667705059052,
-0.0850115641951561,
-0.022384438663721085,
-0.018046174198389053,
-0.017486320808529854,
-0.11989085376262665,
0.09690949320793152,
-0.00031089011463336647,
-0.03331909328699112,
-0.021639423444867134,
0.07105792313814163,
-0.22307631373405457,
0.12697027623653412,
-0.09167250245809555,
-0.1366548389196396,
-0.0039794291369616985,
0.010581991635262966,
0.14699189364910126,
0.010531964711844921,
0.06783802062273026,
0.00920537393540144,
-0.0970349982380867,
0.05769004300236702,
0.03405383974313736,
-0.19634275138378143,
0.03129531070590019,
0.01826382428407669,
-0.1292518675327301,
-0.012508087791502476,
-0.014223290607333183,
0.0012823810102418065,
0.05514613166451454,
0.08393192291259766,
-0.004228282254189253,
0.09098393470048904,
-0.008708415552973747,
0.00917426124215126,
-0.002861109795048833,
0.020068461075425148,
-0.008357629179954529,
-0.09112586826086044,
0.019638797268271446,
-0.17382124066352844,
0.0536687858402729,
-0.04810817912220955,
-0.055506207048892975,
0.006670569069683552,
0.0007486522081308067,
-0.0797073170542717,
0.04620343819260597,
0.06844745576381683,
0.010719320736825466,
0.006762451026588678,
-0.029975656419992447,
-0.05427056550979614,
-0.017081810161471367,
-0.0930955708026886,
-0.06901969760656357,
-0.11528806388378143,
-0.07885977625846863,
0.12976893782615662,
0.011557414196431637,
-0.13792459666728973,
0.008182594552636147,
-0.08484403043985367,
0.06415541470050812,
-0.16331599652767181,
0.07905606180429459,
0.10163272172212601,
-0.010096952319145203,
-0.014980274252593517,
-0.07861216366291046,
0.06902386248111725,
0.09388373792171478,
-0.08484313637018204,
-0.10015341639518738
] |
null | null |
transformers
|
## Albert xxlarge version 1 language model fine-tuned on SQuAD2.0
### (updated 30Sept2020) with the following results:
```
exact: 86.11134506864315
f1: 89.35371214945009
total: 11873
HasAns_exact: 83.56950067476383
HasAns_f1: 90.06353312254078
HasAns_total: 5928
NoAns_exact: 88.64592094196804
NoAns_f1: 88.64592094196804
NoAns_total: 5945
best_exact: 86.11134506864315
best_exact_thresh: 0.0
best_f1: 89.35371214944985
best_f1_thresh: 0.0
```
### from script:
```
python ${EXAMPLES}/run_squad.py \
--model_type albert \
--model_name_or_path albert-xxlarge-v1 \
--do_train \
--do_eval \
--train_file ${SQUAD}/train-v2.0.json \
--predict_file ${SQUAD}/dev-v2.0.json \
--version_2_with_negative \
--do_lower_case \
--num_train_epochs 3 \
--max_steps 8144 \
--warmup_steps 814 \
--learning_rate 3e-5 \
--max_seq_length 512 \
--doc_stride 128 \
--per_gpu_train_batch_size 6 \
--gradient_accumulation_steps 8 \
--per_gpu_eval_batch_size 48 \
--fp16 \
--fp16_opt_level O1 \
--threads 12 \
--logging_steps 50 \
--save_steps 3000 \
--overwrite_output_dir \
--output_dir ${MODEL_PATH}
```
### using the following software & system:
```
Transformers: 3.1.0
PyTorch: 1.6.0
TensorFlow: 2.3.1
Python: 3.8.1
OS: Linux-5.4.0-48-generic-x86_64-with-glibc2.10
CPU/GPU: Intel i9-9900K / NVIDIA Titan RTX 24GB
```
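### example inference (minimal sketch):
A minimal inference sketch, assuming the standard `transformers` question-answering pipeline and this repo id; the question/context strings below are illustrative only.
```
from transformers import pipeline

# Load the fine-tuned ALBERT xxlarge v1 SQuAD2.0 checkpoint via the QA pipeline.
qa = pipeline(
    "question-answering",
    model="ahotrod/albert_xxlargev1_squad2_512",
    tokenizer="ahotrod/albert_xxlargev1_squad2_512",
)

result = qa(
    question="Which dataset was the model fine-tuned on?",
    context="This ALBERT xxlarge v1 model was fine-tuned on SQuAD2.0 "
            "for extractive question answering with unanswerable questions.",
)
print(result)  # dict with 'score', 'start', 'end', and 'answer'
```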
|
{}
|
question-answering
|
ahotrod/albert_xxlargev1_squad2_512
|
[
"transformers",
"pytorch",
"tf",
"albert",
"question-answering",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tf #albert #question-answering #endpoints_compatible #has_space #region-us
|
## Albert xxlarge version 1 language model fine-tuned on SQuAD2.0
### (updated 30Sept2020) with the following results:
### from script:
### using the following software & system:
|
[
"## Albert xxlarge version 1 language model fine-tuned on SQuAD2.0",
"### (updated 30Sept2020) with the following results:",
"### from script:",
"### using the following software & system:"
] |
[
"TAGS\n#transformers #pytorch #tf #albert #question-answering #endpoints_compatible #has_space #region-us \n",
"## Albert xxlarge version 1 language model fine-tuned on SQuAD2.0",
"### (updated 30Sept2020) with the following results:",
"### from script:",
"### using the following software & system:"
] |
[
37,
18,
15,
5,
9
] |
[
"passage: TAGS\n#transformers #pytorch #tf #albert #question-answering #endpoints_compatible #has_space #region-us \n## Albert xxlarge version 1 language model fine-tuned on SQuAD2.0### (updated 30Sept2020) with the following results:### from script:### using the following software & system:"
] |
[
-0.07586903125047684,
0.03650606796145439,
-0.0037555161397904158,
0.03740644454956055,
0.06168130412697792,
-0.0006175166345201433,
0.03890785202383995,
0.14078174531459808,
0.0958026871085167,
0.050697848200798035,
0.1726243644952774,
0.07404335588216782,
-0.0009308906155638397,
0.14338457584381104,
-0.06524035334587097,
-0.2516421675682068,
0.011649022810161114,
0.04437495023012161,
-0.060664910823106766,
0.106917604804039,
0.06940107047557831,
-0.07000034302473068,
0.08878721296787262,
-0.01046394556760788,
-0.10811683535575867,
0.04101170226931572,
-0.002197427209466696,
-0.04927251487970352,
0.07436186075210571,
0.09336429834365845,
0.10660447180271149,
0.06791605055332184,
0.0008490407490171492,
-0.14552445709705353,
0.02835039235651493,
-0.02875073440372944,
-0.04211652651429176,
0.06080438941717148,
0.07824598252773285,
-0.03446197882294655,
0.1491265743970871,
-0.0951915830373764,
-0.04635770246386528,
0.01413736492395401,
-0.12164533138275146,
-0.11869026720523834,
-0.06333017349243164,
0.04662739112973213,
0.09054625779390335,
0.07339628040790558,
-0.028740085661411285,
0.13555915653705597,
-0.03826314955949783,
0.0969940647482872,
0.28265297412872314,
-0.41137874126434326,
-0.06606868654489517,
0.09830333292484283,
0.12790505588054657,
0.0639878436923027,
-0.010730907320976257,
0.10268599539995193,
0.05362631380558014,
0.019331926479935646,
0.038107775151729584,
-0.12111760675907135,
-0.08621453493833542,
0.04178795963525772,
-0.12910473346710205,
-0.07076262682676315,
0.20507334172725677,
0.014649846591055393,
-0.0021144095808267593,
-0.04023617133498192,
-0.10638526827096939,
-0.09440790116786957,
-0.05405588075518608,
-0.057328443974256516,
0.022815218195319176,
-0.0015234735328704119,
0.07587137073278427,
-0.1272287219762802,
-0.12488378584384918,
-0.037066251039505005,
-0.13485443592071533,
0.30316850543022156,
-0.004870964679867029,
0.08535795658826828,
-0.1702100783586502,
0.03128765895962715,
-0.12588317692279816,
-0.13000856339931488,
-0.013708450831472874,
-0.0520692877471447,
0.0025092163123190403,
0.06276749074459076,
-0.1234218180179596,
0.050558824092149734,
0.03795298561453819,
0.07892081886529922,
-0.027069803327322006,
0.04588374122977257,
0.045753125101327896,
0.07353824377059937,
0.010978111065924168,
0.25253498554229736,
-0.11953744292259216,
-0.07812757790088654,
0.003381937276571989,
0.012457800097763538,
0.03803728148341179,
-0.039557814598083496,
-0.1674003303050995,
-0.09839864820241928,
0.050702087581157684,
0.06800978630781174,
0.018878284841775894,
0.02165530063211918,
0.002658240497112274,
-0.029783470556139946,
0.07127293944358826,
-0.09267070144414902,
0.012501736171543598,
-0.0034941656049340963,
-0.04621240496635437,
0.13187353312969208,
0.029367800801992416,
-0.022776158526539803,
-0.07078300416469574,
-0.01141235139220953,
-0.054989591240882874,
0.04402472451329231,
-0.03798949345946312,
-0.09768825024366379,
0.007056546863168478,
-0.06460490822792053,
0.03826185688376427,
-0.14468435943126678,
-0.015174973756074905,
0.02478296123445034,
0.04921591654419899,
-0.02679409086704254,
0.008763421326875687,
-0.015031141228973866,
-0.06423119455575943,
0.04849320650100708,
-0.035254158079624176,
0.0459616519510746,
-0.034512974321842194,
0.018663670867681503,
0.01569143868982792,
0.10248880088329315,
-0.19511698186397552,
0.05470039322972298,
-0.01139252632856369,
0.030538272112607956,
-0.0610835924744606,
0.019534768536686897,
-0.01577506773173809,
-0.05381547287106514,
-0.05337730795145035,
-0.12507279217243195,
-0.018871376290917397,
0.04574558138847351,
0.026826336979866028,
0.15378320217132568,
-0.19258418679237366,
-0.019515981897711754,
0.2046637386083603,
0.0047929733991622925,
-0.16891981661319733,
0.1234346255660057,
0.03149409964680672,
-0.10153770446777344,
0.003920409362763166,
0.13786479830741882,
0.004324927926063538,
-0.12408950924873352,
0.008427434600889683,
0.14970237016677856,
-0.009177437983453274,
-0.014992275275290012,
0.11328180879354477,
0.011628083884716034,
-0.01922578737139702,
0.03032757341861725,
-0.08715038746595383,
0.01495152898132801,
-0.04690619930624962,
-0.04952118173241615,
-0.042904529720544815,
-0.04878688603639603,
0.06065329909324646,
0.021408552303910255,
0.09232151508331299,
-0.08466671407222748,
-0.07505639642477036,
-0.06812064349651337,
0.04817692190408707,
0.00346902827732265,
0.04184531420469284,
-0.10052044689655304,
0.12093023210763931,
-0.07300429791212082,
0.041023414582014084,
-0.20413987338542938,
-0.027027612552046776,
-0.005386564880609512,
0.052451811730861664,
0.006237764377146959,
0.22852823138237,
0.08755779266357422,
-0.04403378441929817,
-0.049200449138879776,
-0.024405434727668762,
-0.00013669308100361377,
-0.03304106742143631,
-0.07136444002389908,
-0.02511388622224331,
-0.03233475983142853,
-0.09131364524364471,
0.05171962082386017,
-0.028466081246733665,
0.0008226759964600205,
-0.005970607977360487,
0.03057856857776642,
-0.0018976905848830938,
0.05291813239455223,
-0.004620775114744902,
0.03739343211054802,
-0.04164726659655571,
-0.011807672679424286,
0.045857883989810944,
-0.024555528536438942,
-0.11100476235151291,
0.1295652836561203,
-0.13888879120349884,
0.19128315150737762,
0.19449034333229065,
-0.14132647216320038,
0.040138714015483856,
0.07988393306732178,
-0.035753242671489716,
0.040074098855257034,
0.0712767094373703,
-0.018775686621665955,
0.13134858012199402,
-0.0018594132270663977,
0.1435546875,
-0.06627128273248672,
-0.021863246336579323,
0.00014229898806661367,
-0.06236639618873596,
-0.06598338484764099,
0.06449632346630096,
-0.039112746715545654,
-0.1110890582203865,
0.08594348281621933,
0.3147313594818115,
-0.05038132518529892,
0.09685035794973373,
0.027703672647476196,
-0.07916542142629623,
-0.045749448239803314,
0.009456837549805641,
-0.013517354615032673,
0.05088719725608826,
-0.09632982313632965,
-0.07776643335819244,
0.06695646047592163,
-0.016598675400018692,
0.06742678582668304,
-0.10825519263744354,
-0.024804752320051193,
0.024440087378025055,
-0.04430478811264038,
-0.05048227310180664,
0.10511817783117294,
0.013203329406678677,
0.10799197107553482,
0.019641153514385223,
-0.05240531265735626,
-0.009692658670246601,
-0.026264185085892677,
-0.05648864805698395,
0.15040747821331024,
-0.10327082127332687,
-0.23918452858924866,
-0.10445954650640488,
-0.02449880726635456,
-0.06821322441101074,
0.028801141306757927,
0.05557854101061821,
-0.03087199665606022,
-0.0008683076011948287,
-0.049170125275850296,
0.0012233176967129111,
-0.10647010803222656,
0.009096470661461353,
0.02257896587252617,
-0.02756568044424057,
-0.018419574946165085,
-0.08856542408466339,
-0.06913802027702332,
-0.040025658905506134,
-0.06627077609300613,
0.08336830884218216,
-0.10521697998046875,
0.106504887342453,
0.135736882686615,
-0.05127139389514923,
0.07523041218519211,
-0.020508745685219765,
0.19945086538791656,
-0.06368058174848557,
-0.03755132108926773,
0.16141049563884735,
0.008184158243238926,
0.05058492347598076,
0.05008108541369438,
0.0260617695748806,
-0.07511795312166214,
-0.019159670919179916,
-0.04207717254757881,
-0.09844988584518433,
-0.18838457763195038,
-0.095292828977108,
-0.115293487906456,
0.04954928159713745,
0.035976096987724304,
0.011238695122301579,
0.022412100806832314,
0.03138462081551552,
-0.0021115238778293133,
0.013587250374257565,
-0.05649055168032646,
0.04421348497271538,
0.0650835633277893,
-0.0006229397258721292,
0.12328342348337173,
-0.04792530834674835,
-0.05258791148662567,
0.09769096225500107,
0.0173141211271286,
0.11889318376779556,
0.07820355892181396,
0.058401186019182205,
0.014489750377833843,
0.08174394816160202,
0.08245477825403214,
0.12766437232494354,
0.043621331453323364,
-0.032237377017736435,
-0.038008853793144226,
-0.05527582764625549,
-0.08642219007015228,
0.10658075660467148,
0.1318245381116867,
-0.06607568264007568,
-0.06801585853099823,
-0.02800571732223034,
0.04811027646064758,
0.12944747507572174,
0.08739766478538513,
-0.14230398833751678,
0.0072672548703849316,
0.08496357500553131,
0.013382069766521454,
-0.06827103346586227,
0.07608155161142349,
0.07608453184366226,
-0.06126488372683525,
0.04906746745109558,
-0.020406346768140793,
0.13009226322174072,
0.08546040952205658,
0.036627333611249924,
-0.1052442193031311,
-0.08514861762523651,
0.07604600489139557,
0.08777521550655365,
-0.28187891840934753,
0.29246363043785095,
0.029045797884464264,
-0.06052517518401146,
-0.044590163975954056,
0.005463576875627041,
0.05773370340466499,
0.12286359816789627,
0.20930182933807373,
0.01671302132308483,
-0.017293758690357208,
-0.08860839903354645,
-0.03785743936896324,
0.02763565070927143,
0.13412882387638092,
-0.03608671575784683,
0.0515885055065155,
-0.004626862239092588,
-0.04797092825174332,
-0.0103002255782485,
0.13699008524417877,
-0.040814973413944244,
-0.13533444702625275,
0.0736459419131279,
0.04143885150551796,
-0.07891115546226501,
-0.043080803006887436,
-0.07794147729873657,
-0.14183001220226288,
0.06841737776994705,
0.05739346146583557,
-0.04181801900267601,
-0.08801942318677902,
0.0007644969155080616,
0.10429713129997253,
-0.11768560111522675,
0.019553272053599358,
-0.03966210409998894,
0.04960104450583458,
-0.023491589352488518,
-0.15503822267055511,
0.09991095215082169,
-0.10394435375928879,
-0.02209208905696869,
0.018988139927387238,
0.165475532412529,
-0.08119064569473267,
0.044284023344516754,
0.0036688032560050488,
0.050920020788908005,
-0.07300182431936264,
-0.11983614414930344,
0.012157855555415154,
0.0033611496910452843,
0.02698497287929058,
0.03566693514585495,
-0.025691773742437363,
-0.023030845448374748,
-0.014407068490982056,
0.06648369133472443,
0.2284078299999237,
0.19007359445095062,
-0.11267857253551483,
-0.010514749214053154,
0.061157483607530594,
0.0000818643529783003,
-0.2600758373737335,
-0.10118333250284195,
-0.014124931767582893,
0.03185349702835083,
0.01901385374367237,
-0.09242414683103561,
0.0001871994318207726,
-0.006042552646249533,
-0.0288754403591156,
0.049717094749212265,
-0.1981719583272934,
-0.08171789348125458,
0.13168580830097198,
0.03775528445839882,
0.25591209530830383,
-0.1727968156337738,
-0.04687140882015228,
0.0034865420311689377,
-0.24751955270767212,
0.03697085753083229,
-0.049571093171834946,
0.06823648512363434,
-0.053720276802778244,
0.05415711551904678,
0.018278924748301506,
-0.04244176670908928,
0.1687687337398529,
-0.0435977503657341,
0.0018917617853730917,
-0.03109855391085148,
-0.14918692409992218,
0.07422718405723572,
-0.05232538282871246,
0.08568060398101807,
-0.10008052736520767,
0.046939581632614136,
-0.1670607477426529,
-0.0130906468257308,
-0.12200570106506348,
0.04512142390012741,
-0.042460937052965164,
-0.033769916743040085,
0.0033252392895519733,
-0.039258308708667755,
-0.005116646643728018,
0.017418701201677322,
0.19908595085144043,
-0.07259572297334671,
0.05672381818294525,
0.1527048498392105,
0.09240248054265976,
-0.12801040709018707,
0.004068012814968824,
0.011338209733366966,
-0.029217896983027458,
0.08013700693845749,
-0.161407470703125,
0.05856810882687569,
0.1524648219347,
0.017786994576454163,
0.06229088082909584,
0.07832258939743042,
0.01132233440876007,
-0.007590889930725098,
0.06683775037527084,
-0.17408166825771332,
-0.20348019897937775,
-0.08752569556236267,
0.07498585432767868,
0.025301344692707062,
0.020330410450696945,
0.09240742027759552,
-0.015114531852304935,
-0.04197850450873375,
-0.03291275352239609,
-0.05381539463996887,
-0.05332653969526291,
0.03049592114984989,
0.07067351788282394,
0.053824279457330704,
-0.10107792168855667,
0.0151131022721529,
0.0036957513075321913,
-0.15181681513786316,
0.00031284065335057676,
0.1291142702102661,
-0.057429976761341095,
-0.12679365277290344,
-0.019161030650138855,
0.14312142133712769,
-0.09144876897335052,
-0.03951602801680565,
-0.0409332811832428,
-0.05535450577735901,
0.03669577091932297,
0.11991319060325623,
0.10896025598049164,
0.0040634446777403355,
-0.03429010137915611,
-0.024470705538988113,
0.03615342453122139,
0.05654876306653023,
0.008611682802438736,
0.011910421773791313,
-0.07069722563028336,
-0.00975780375301838,
-0.0389694906771183,
0.1831088811159134,
-0.07554256916046143,
-0.027909521013498306,
-0.04102615639567375,
0.013679570518434048,
-0.23437127470970154,
-0.005271589383482933,
-0.09075140208005905,
-0.05165372043848038,
-0.03670154884457588,
-0.09533677995204926,
-0.1103314533829689,
0.0031957952305674553,
-0.1350729912519455,
-0.03324192389845848,
-0.06275558471679688,
0.011903696693480015,
-0.06356063485145569,
-0.049665484577417374,
0.04949470981955528,
-0.05202845484018326,
0.07826431840658188,
0.10992700606584549,
-0.04478399455547333,
0.11704673618078232,
-0.011353542096912861,
-0.11354723572731018,
0.021910209208726883,
0.08465814590454102,
0.10592123866081238,
-0.025358282029628754,
0.021101217716932297,
0.0250909011811018,
0.053991906344890594,
-0.011611459776759148,
0.024608174338936806,
-0.05025462433695793,
-0.03867759555578232,
-0.01867913268506527,
-0.06347827613353729,
-0.05840790644288063,
0.03209740296006203,
0.10555750131607056,
0.09782401472330093,
0.06421969085931778,
-0.00561006274074316,
0.0696449875831604,
-0.10133211314678192,
0.02338341437280178,
-0.013371767476201057,
-0.09797919541597366,
-0.026567958295345306,
-0.08733246475458145,
0.04504215717315674,
-0.0400717556476593,
0.1578921377658844,
-0.042182520031929016,
0.09841296076774597,
0.035380102694034576,
-0.010205551981925964,
0.059153858572244644,
0.010112087242305279,
0.23061810433864594,
0.050423502922058105,
-0.04811764508485794,
-0.040179431438446045,
0.04931846633553505,
0.040537625551223755,
0.13057951629161835,
0.17794868350028992,
0.12403159588575363,
0.06083644926548004,
0.17079530656337738,
-0.02851344645023346,
-0.04107028245925903,
-0.13267108798027039,
-0.17605307698249817,
-0.02213176339864731,
0.06355345994234085,
-0.0367174930870533,
0.08535629510879517,
0.13038936257362366,
-0.10320479422807693,
0.06490049511194229,
-0.05345601961016655,
-0.026266731321811676,
-0.13281244039535522,
-0.0007880352786742151,
-0.11497034132480621,
-0.09342210739850998,
-0.021065954118967056,
-0.1304965615272522,
0.044684410095214844,
0.05552928149700165,
0.044057682156562805,
-0.0050604138523340225,
0.1251164823770523,
-0.10305202752351761,
-0.07213675230741501,
0.0940035954117775,
-0.007499117869883776,
0.08272980153560638,
0.06569769978523254,
0.029001425951719284,
-0.040025290101766586,
-0.07847937196493149,
0.023808255791664124,
0.031433410942554474,
-0.0817255973815918,
0.008992360904812813,
-0.09955991059541702,
-0.07732182741165161,
-0.0605025477707386,
0.03342682868242264,
0.04563978686928749,
0.14777135848999023,
-0.003649134421721101,
-0.013142647221684456,
0.0043534631840884686,
0.2043370008468628,
-0.12503594160079956,
-0.04373634606599808,
-0.06998267769813538,
0.1804114133119583,
0.06386622041463852,
0.10126819461584091,
-0.03906886279582977,
0.003889900166541338,
-0.08657197654247284,
0.2499322146177292,
0.20434735715389252,
-0.05589049309492111,
0.028380822390317917,
0.056524429470300674,
0.050853531807661057,
0.028256384655833244,
0.0607023648917675,
0.11117913573980331,
0.16374163329601288,
-0.09030614793300629,
-0.07877396792173386,
-0.06150595098733902,
-0.006607424467802048,
-0.04272674396634102,
0.09844347834587097,
0.037065811455249786,
-0.07701496034860611,
-0.028584470972418785,
0.026623377576470375,
-0.038564909249544144,
0.05562214553356171,
-0.011290237307548523,
-0.24866224825382233,
-0.08413776010274887,
0.016306530684232712,
0.10743174701929092,
0.05149271339178085,
0.13142816722393036,
-0.04156823828816414,
-0.03133706375956535,
0.10508342832326889,
-0.004734533838927746,
-0.14687126874923706,
0.009655856527388096,
0.0907621756196022,
-0.1499224603176117,
0.04891962558031082,
0.018179913982748985,
0.07246895879507065,
0.10878190398216248,
0.04955482482910156,
-0.04877255856990814,
0.09823793172836304,
0.046553388237953186,
-0.01470567099750042,
-0.011571265757083893,
0.12227974086999893,
-0.035238515585660934,
-0.10252801328897476,
0.06039702519774437,
-0.10959766805171967,
-0.004207227844744921,
0.026655439287424088,
-0.02534869685769081,
-0.06415459513664246,
0.038286834955215454,
-0.0797492042183876,
0.0905335396528244,
0.12182239443063736,
-0.010118525475263596,
-0.031240835785865784,
-0.0005058111273683608,
0.022005587816238403,
0.05654103308916092,
0.08445071429014206,
-0.11442305892705917,
-0.04773896560072899,
-0.026126785203814507,
-0.023188216611742973,
0.0001187583984574303,
-0.14026521146297455,
-0.007339319679886103,
-0.05210825800895691,
0.019415535032749176,
-0.013762356713414192,
0.06933329254388809,
0.08327630907297134,
0.013816439546644688,
0.0012457063421607018,
-0.13103178143501282,
0.01236757356673479,
0.09380075335502625,
-0.1511968970298767,
-0.08351999521255493
] |
null | null |
transformers
|
## ELECTRA_large_discriminator language model fine-tuned on SQuAD2.0
### with the following results:
```
"exact": 87.09677419354838,
"f1": 89.98343832723452,
"total": 11873,
"HasAns_exact": 84.66599190283401,
"HasAns_f1": 90.44759839056285,
"HasAns_total": 5928,
"NoAns_exact": 89.52060555088309,
"NoAns_f1": 89.52060555088309,
"NoAns_total": 5945,
"best_exact": 87.09677419354838,
"best_exact_thresh": 0.0,
"best_f1": 89.98343832723432,
"best_f1_thresh": 0.0
```
### from script:
```
python ${EXAMPLES}/run_squad.py \
--model_type electra \
--model_name_or_path google/electra-large-discriminator \
--do_train \
--do_eval \
--train_file ${SQUAD}/train-v2.0.json \
--predict_file ${SQUAD}/dev-v2.0.json \
--version_2_with_negative \
--do_lower_case \
--num_train_epochs 3 \
--warmup_steps 306 \
--weight_decay 0.01 \
--learning_rate 3e-5 \
--max_grad_norm 0.5 \
--adam_epsilon 1e-6 \
--max_seq_length 512 \
--doc_stride 128 \
--per_gpu_train_batch_size 8 \
--gradient_accumulation_steps 16 \
--per_gpu_eval_batch_size 128 \
--fp16 \
--fp16_opt_level O1 \
--threads 12 \
--logging_steps 50 \
--save_steps 1000 \
--overwrite_output_dir \
--output_dir ${MODEL_PATH}
```
### using the following system & software:
```
Transformers: 2.11.0
PyTorch: 1.5.0
TensorFlow: 2.2.0
Python: 3.8.1
OS/Platform: Linux-5.3.0-59-generic-x86_64-with-glibc2.10
CPU/GPU: Intel i9-9900K / NVIDIA Titan RTX 24GB
```
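### example inference (minimal sketch):
A minimal inference sketch, assuming a recent `transformers` release whose model outputs expose `start_logits`/`end_logits`; the question and context strings are illustrative only.
```
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

model_id = "ahotrod/electra_large_discriminator_squad2_512"
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForQuestionAnswering.from_pretrained(model_id)

question = "Which dataset was used for fine-tuning?"
context = "The ELECTRA large discriminator was fine-tuned on SQuAD2.0 at a 512-token sequence length."

inputs = tokenizer(question, context, return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# Take the most likely start/end token positions and decode the answer span.
start = int(torch.argmax(outputs.start_logits))
end = int(torch.argmax(outputs.end_logits)) + 1
answer = tokenizer.decode(inputs["input_ids"][0][start:end])
print(answer)  # for a SQuAD2.0-style "no answer", both argmaxes fall on the [CLS] token
```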
|
{}
|
question-answering
|
ahotrod/electra_large_discriminator_squad2_512
|
[
"transformers",
"pytorch",
"tf",
"electra",
"question-answering",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tf #electra #question-answering #endpoints_compatible #has_space #region-us
|
## ELECTRA_large_discriminator language model fine-tuned on SQuAD2.0
### with the following results:
### from script:
### using the following system & software:
|
[
"## ELECTRA_large_discriminator language model fine-tuned on SQuAD2.0",
"### with the following results:",
"### from script:",
"### using the following system & software:"
] |
[
"TAGS\n#transformers #pytorch #tf #electra #question-answering #endpoints_compatible #has_space #region-us \n",
"## ELECTRA_large_discriminator language model fine-tuned on SQuAD2.0",
"### with the following results:",
"### from script:",
"### using the following system & software:"
] |
[
37,
22,
7,
5,
9
] |
[
"passage: TAGS\n#transformers #pytorch #tf #electra #question-answering #endpoints_compatible #has_space #region-us \n## ELECTRA_large_discriminator language model fine-tuned on SQuAD2.0### with the following results:### from script:### using the following system & software:"
] |
[
-0.0784182921051979,
0.11840599030256271,
-0.006261963862925768,
0.05802460014820099,
0.12088112533092499,
-0.011992322281002998,
-0.006095199845731258,
0.10423185676336288,
0.031123116612434387,
0.05323488265275955,
0.13507023453712463,
0.13312633335590363,
-0.0001915647299028933,
0.12419728934764862,
-0.092670738697052,
-0.23422890901565552,
0.0005736997700296342,
0.06610731780529022,
-0.12991295754909515,
0.1249096468091011,
0.12562453746795654,
-0.09918290376663208,
0.06536225229501724,
0.009617486037313938,
-0.08444349467754364,
0.05603765696287155,
-0.0031385053880512714,
-0.09834209829568863,
0.07098010927438736,
0.07816038280725479,
0.11000489443540573,
0.0687488242983818,
-0.019515758380293846,
-0.11480609327554703,
0.043897271156311035,
-0.04705110192298889,
-0.03568839654326439,
0.0530422180891037,
0.06910214573144913,
-0.069879911839962,
0.16608145833015442,
-0.032664887607097626,
0.029032574966549873,
-0.0007780946907587349,
-0.10696592181921005,
-0.14612630009651184,
-0.01995495893061161,
0.018440326675772667,
0.09488404542207718,
0.08498823642730713,
-0.05142514035105705,
0.1688104271888733,
-0.12310921400785446,
0.0770321786403656,
0.18736989796161652,
-0.23239293694496155,
-0.01208838913589716,
0.004914703778922558,
0.09088865667581558,
-0.027634164318442345,
-0.018794270232319832,
0.015445693396031857,
0.049570225179195404,
0.042429886758327484,
0.037193506956100464,
-0.08717012405395508,
-0.10174191743135452,
0.08380641043186188,
-0.14398790895938873,
-0.04484519734978676,
0.203545480966568,
0.03332994133234024,
0.03701603040099144,
-0.05372505262494087,
-0.12203659117221832,
-0.09970441460609436,
0.008569308556616306,
0.00263282866217196,
-0.04705137014389038,
0.004083574749529362,
0.0005057472153566778,
-0.02753172628581524,
-0.11481940001249313,
-0.025105364620685577,
-0.17849324643611908,
0.3297477662563324,
0.027496011927723885,
0.05284024029970169,
-0.17087028920650482,
0.024678686633706093,
-0.044847846031188965,
-0.12389408051967621,
-0.0010107880225405097,
-0.06628589332103729,
-0.04399567469954491,
0.03512810915708542,
-0.09630316495895386,
0.0270195621997118,
0.11969266086816788,
0.19836142659187317,
0.008943541906774044,
0.02226508967578411,
0.06931427866220474,
0.0501006618142128,
0.04407006874680519,
0.1631697714328766,
-0.13849855959415436,
-0.030904490500688553,
0.02280166931450367,
-0.020786484703421593,
0.045844465494155884,
-0.050176408141851425,
-0.1575784683227539,
-0.11031832545995712,
0.055447544902563095,
0.11644415557384491,
0.03876987099647522,
0.06246086582541466,
-0.0462704636156559,
-0.027576426044106483,
0.008616828359663486,
-0.11758043617010117,
0.01653706096112728,
0.028762375935912132,
-0.016087543219327927,
0.03451460972428322,
0.009044059552252293,
-0.010465370491147041,
-0.058974266052246094,
-0.029334956780076027,
-0.04914163798093796,
0.004790839739143848,
-0.04138416424393654,
-0.1352124661207199,
0.01672857627272606,
-0.026356661692261696,
0.08089122921228409,
-0.20271573960781097,
-0.021276310086250305,
0.03758508339524269,
-0.013781709596514702,
-0.016540972515940666,
-0.05697090923786163,
0.0029160042759031057,
-0.0524478480219841,
0.03788404166698456,
-0.04679722711443901,
-0.012530107982456684,
-0.0319824144244194,
0.07298387587070465,
0.06356534361839294,
0.0948508083820343,
-0.14551463723182678,
0.028064168989658356,
-0.049561452120542526,
0.018053999170660973,
-0.17937980592250824,
0.04428699240088463,
0.028312712907791138,
-0.029229987412691116,
-0.08130856603384018,
-0.14388979971408844,
-0.08462943881750107,
0.03717641159892082,
0.027143022045493126,
0.15600015223026276,
-0.16662555932998657,
-0.037665992975234985,
0.1462995409965515,
-0.0884290561079979,
-0.13870051503181458,
0.14083856344223022,
-0.005179157014936209,
-0.08801990002393723,
0.007150380872189999,
0.1638205200433731,
0.05555606260895729,
-0.07201704382896423,
-0.009275378659367561,
0.08247562497854233,
-0.12769556045532227,
0.034165944904088974,
0.11074839532375336,
-0.060074158012866974,
0.00207950035110116,
-0.020614517852663994,
-0.04211844503879547,
0.09475122392177582,
-0.048356976360082626,
-0.039483100175857544,
-0.022774213925004005,
-0.03509834408760071,
0.13819649815559387,
0.0018903613090515137,
0.019412463530898094,
-0.0593656450510025,
-0.062211211770772934,
-0.020925967022776604,
0.11602933704853058,
-0.04138122498989105,
0.034840673208236694,
-0.2103452831506729,
0.13438785076141357,
-0.05962149426341057,
0.0005444145062938333,
-0.23212558031082153,
0.019843174144625664,
-0.04610659182071686,
0.10657503455877304,
0.02335214801132679,
0.1598152071237564,
0.05392361432313919,
-0.05855792760848999,
-0.03419588506221771,
-0.031055202707648277,
0.0016340392176061869,
-0.002021235413849354,
-0.04190210625529289,
-0.042395759373903275,
-0.030352197587490082,
-0.03536725416779518,
0.05794156715273857,
-0.08546049892902374,
0.011369949206709862,
0.1173333078622818,
0.056575946509838104,
-0.0027238770853728056,
0.06533025205135345,
0.031067153438925743,
0.043699316680431366,
-0.0846589207649231,
-0.029646167531609535,
-0.0033200483303517103,
-0.027006449177861214,
-0.1043752059340477,
0.06456225365400314,
-0.16364094614982605,
0.2367963194847107,
0.18747945129871368,
-0.13116079568862915,
0.007193759549409151,
0.08146011829376221,
-0.010517297312617302,
0.06621600687503815,
0.0023928023874759674,
-0.031052574515342712,
0.06498901546001434,
-0.007803110405802727,
0.10558980703353882,
-0.06433270126581192,
0.004361667670309544,
-0.00392566341906786,
-0.04488937184214592,
0.030167479068040848,
0.08075679838657379,
0.12291443347930908,
-0.22072134912014008,
0.0881478488445282,
0.18837982416152954,
-0.0629955306649208,
0.09813232719898224,
-0.020174091681838036,
-0.038687191903591156,
-0.037165697664022446,
0.004550753626972437,
-0.010515179485082626,
0.006826130673289299,
-0.0997440442442894,
0.02416912652552128,
0.07418873906135559,
-0.02774239145219326,
0.0471377894282341,
-0.0734504759311676,
-0.026998382061719894,
0.02939404733479023,
-0.02737448923289776,
-0.046495892107486725,
0.1215592697262764,
0.06672092527151108,
0.0736258253455162,
-0.0073735215701162815,
-0.10476300865411758,
0.034546975046396255,
-0.0037488904781639576,
-0.09677037596702576,
0.17822502553462982,
-0.09317673742771149,
-0.2000780701637268,
-0.0968049094080925,
0.0396394282579422,
0.019917674362659454,
0.04538685828447342,
0.047977570444345474,
-0.026656217873096466,
-0.028379760682582855,
-0.031131895259022713,
0.09787283092737198,
-0.10478589683771133,
0.08380674570798874,
0.019062627106904984,
0.029753969982266426,
-0.012685159221291542,
-0.09313999116420746,
-0.02122551202774048,
-0.009624156169593334,
-0.005318775773048401,
0.08927035331726074,
-0.0903247520327568,
0.11251290887594223,
0.19408445060253143,
-0.024556037038564682,
0.04818803071975708,
-0.02052384801208973,
0.23515348136425018,
-0.11441344022750854,
-0.024143777787685394,
0.23010261356830597,
-0.03564821183681488,
0.048259276896715164,
0.12567567825317383,
-0.026601437479257584,
-0.050038013607263565,
0.03449586406350136,
-0.03891879320144653,
-0.08272257447242737,
-0.16305296123027802,
-0.10603196918964386,
-0.1318066269159317,
0.018778784200549126,
0.055700719356536865,
0.025080503895878792,
-0.06055695191025734,
0.07379854470491409,
-0.006806902587413788,
-0.09637120366096497,
-0.014542602002620697,
0.05714239180088043,
0.19410444796085358,
0.047205064445734024,
0.11590351164340973,
-0.06566405296325684,
-0.110643669962883,
0.093772292137146,
-0.030416464433073997,
0.13931770622730255,
0.018902108073234558,
0.060338690876960754,
0.06463182717561722,
0.12594366073608398,
0.1121542677283287,
0.11502426862716675,
0.05400121212005615,
-0.016597267240285873,
-0.019288290292024612,
-0.020482191815972328,
-0.029660837724804878,
0.01580953598022461,
0.16366465389728546,
-0.07635633647441864,
-0.08270943909883499,
0.017309114336967468,
0.07656130939722061,
0.05032166838645935,
0.09684064984321594,
-0.22176526486873627,
0.005145863629877567,
0.05173501372337341,
-0.04387305676937103,
-0.06289473921060562,
0.04461943358182907,
0.0853351429104805,
-0.08152618259191513,
0.09955865144729614,
-0.0015170101542025805,
0.08908601850271225,
0.03369352966547012,
0.0478464737534523,
-0.12529468536376953,
-0.07723435759544373,
0.03065752051770687,
0.10418325662612915,
-0.22491925954818726,
0.1956455111503601,
0.017829151824116707,
-0.041620202362537384,
-0.04208136722445488,
-0.01798698678612709,
0.01005696039646864,
0.13002754747867584,
0.08680309355258942,
-0.012844172306358814,
-0.05407610535621643,
-0.12600834667682648,
0.031327441334724426,
0.04179070144891739,
0.10300543159246445,
-0.06825754046440125,
0.0823274701833725,
0.003368520876392722,
0.010706573724746704,
0.018307728692889214,
0.10841862857341766,
0.028276216238737106,
-0.15435399115085602,
0.014348527416586876,
0.05684428662061691,
0.005464922171086073,
-0.04189847409725189,
-0.04764213413000107,
-0.11831878125667572,
0.06823912262916565,
-0.1634775549173355,
-0.09594302624464035,
-0.09340015053749084,
0.040760621428489685,
0.15481823682785034,
-0.12529267370700836,
0.09095555543899536,
-0.0028714374639093876,
0.07358144968748093,
-0.03384131193161011,
-0.10708563029766083,
0.11042921245098114,
-0.0929117202758789,
-0.06382214277982712,
0.04701553285121918,
0.12990082800388336,
-0.06616506725549698,
0.022323012351989746,
0.037484120577573776,
0.015160330571234226,
-0.09832677990198135,
-0.1591087430715561,
-0.02901441603899002,
-0.05001838877797127,
0.10166510939598083,
0.0818028375506401,
0.06022980064153671,
-0.04980158433318138,
0.013429705053567886,
0.014326430857181549,
0.22454456984996796,
0.2068432718515396,
-0.1040019541978836,
-0.027998169884085655,
0.03619619086384773,
0.03264271840453148,
-0.23610779643058777,
-0.11640200018882751,
-0.0394621267914772,
0.03230039402842522,
0.14419616758823395,
-0.10295220464468002,
-0.01330177579075098,
-0.042864736169576645,
-0.009005369618535042,
-0.040918104350566864,
-0.23610679805278778,
-0.08430179953575134,
0.15400293469429016,
-0.0583992600440979,
0.1988874077796936,
-0.1131863072514534,
-0.03707291930913925,
-0.03549329563975334,
-0.2003154456615448,
0.08800217509269714,
-0.09148499369621277,
0.05999983847141266,
-0.0028665000572800636,
0.10406975448131561,
0.04023131728172302,
-0.03276479244232178,
0.16782689094543457,
-0.005636487156152725,
0.03703298792243004,
-0.04224089905619621,
-0.1712537407875061,
0.03605969250202179,
-0.008943882770836353,
0.08186613768339157,
-0.05778487026691437,
0.05048024281859398,
-0.1790877878665924,
-0.057867199182510376,
-0.09267628937959671,
0.06645739823579788,
0.028134040534496307,
-0.03972761705517769,
-0.0894031748175621,
-0.08072935044765472,
-0.0025387939531356096,
-0.00009155590669251978,
0.13789239525794983,
-0.04193965718150139,
0.017405696213245392,
0.10757359117269516,
0.1734534353017807,
-0.028914622962474823,
-0.10322335362434387,
0.02832198143005371,
-0.02802741713821888,
0.09584440290927887,
-0.08472143858671188,
0.11762651056051254,
0.09502141922712326,
0.0014800020726397634,
0.0651911124587059,
0.09438225626945496,
-0.013557137921452522,
0.028192495927214622,
0.09545999020338058,
-0.1097378209233284,
-0.1425754576921463,
-0.020099295303225517,
0.06685344129800797,
-0.05744802951812744,
0.04415524750947952,
0.08412934094667435,
0.042892973870038986,
-0.054686181247234344,
-0.005557157564908266,
-0.013474291190505028,
-0.029167354106903076,
0.07266207784414291,
0.14730845391750336,
0.07684030383825302,
-0.12536655366420746,
0.012542778626084328,
0.016721246764063835,
-0.11284889280796051,
0.02822822332382202,
0.079535573720932,
-0.10722075402736664,
-0.11888515949249268,
-0.01706746034324169,
0.11612916737794876,
-0.16577987372875214,
-0.07868324965238571,
-0.05679686367511749,
-0.0796395093202591,
0.04623321071267128,
0.11434200406074524,
0.11715057492256165,
0.06031649932265282,
-0.0029169770423322916,
-0.023868383839726448,
-0.0009543476044200361,
0.025024261325597763,
0.03669809550046921,
-0.03451668098568916,
-0.11844546347856522,
0.09593386203050613,
-0.055034101009368896,
0.15507195889949799,
-0.06633148342370987,
-0.00903902854770422,
-0.06907644122838974,
0.04861060529947281,
-0.09805327653884888,
0.029568754136562347,
-0.09724599868059158,
-0.022265374660491943,
-0.0030797868967056274,
-0.06944238394498825,
-0.11079248785972595,
0.004397840704768896,
-0.1215086430311203,
-0.0007494993624277413,
-0.05918998643755913,
-0.010584544390439987,
-0.06367457658052444,
-0.03336390107870102,
0.0719274953007698,
-0.051626693457365036,
0.13036024570465088,
0.04996570572257042,
-0.05586839094758034,
0.037790447473526,
-0.010887166485190392,
-0.16758114099502563,
0.08735812455415726,
0.04718378186225891,
0.04292534291744232,
-0.0693286806344986,
0.037371180951595306,
0.03190261870622635,
0.05173864588141441,
-0.006993639748543501,
0.005025399848818779,
-0.07593583315610886,
0.028648169711232185,
-0.05044230818748474,
-0.06370832026004791,
-0.05478367954492569,
0.006556177046149969,
0.05169133096933365,
0.07216933369636536,
0.13207194209098816,
0.0006158307078294456,
0.019449034705758095,
-0.09465253353118896,
0.040930330753326416,
-0.0666617676615715,
-0.024066785350441933,
0.011046778410673141,
-0.016198450699448586,
0.05637533962726593,
-0.02510407753288746,
0.17302000522613525,
-0.028539486229419708,
0.005160897970199585,
0.04707193002104759,
0.09227636456489563,
0.054606519639492035,
0.03190596401691437,
0.24729350209236145,
0.022146491333842278,
-0.03974438086152077,
-0.038080185651779175,
0.01668459363281727,
0.05724368989467621,
0.1439007818698883,
0.12424028664827347,
0.10244966298341751,
0.032608821988105774,
0.09589330106973648,
-0.08583294600248337,
-0.06018282473087311,
-0.13118457794189453,
-0.11408663541078568,
-0.013464583083987236,
-0.009549770504236221,
0.006762608885765076,
0.05200355499982834,
0.1877613663673401,
-0.025912517681717873,
0.03369651362299919,
-0.027423523366451263,
-0.02394230104982853,
-0.1710849404335022,
-0.10226215422153473,
-0.12712259590625763,
-0.11418969929218292,
-0.00493265176191926,
-0.10585372895002365,
0.020761657506227493,
0.1090826690196991,
0.03099898062646389,
-0.029624493792653084,
0.06963416934013367,
-0.0030018112156540155,
-0.12015734612941742,
0.00005505185617948882,
-0.025179315358400345,
0.048510026186704636,
0.13898585736751556,
0.008011652156710625,
-0.01368575170636177,
-0.040619928389787674,
0.05227275565266609,
0.018505720421671867,
-0.09424085170030594,
0.02480418235063553,
-0.14262177050113678,
-0.06640289723873138,
-0.05679711699485779,
0.04705046862363815,
0.04552073031663895,
0.1486925482749939,
0.00038853075238876045,
-0.0010448911925777793,
0.017585864290595055,
0.23145262897014618,
-0.08177708834409714,
-0.1527029573917389,
-0.10633011907339096,
0.08578471094369888,
0.08721356093883514,
0.12546361982822418,
-0.043438468128442764,
-0.08046473562717438,
-0.04271024465560913,
0.2279372215270996,
0.1404089331626892,
-0.07069678604602814,
0.051329925656318665,
0.005114563275128603,
0.022195378318428993,
0.04991062358021736,
0.012557019479572773,
0.08186239004135132,
0.18680042028427124,
-0.0760207250714302,
-0.046993982046842575,
-0.04852055385708809,
0.03994401544332504,
-0.08649970591068268,
0.05813474580645561,
-0.019551722332835197,
-0.029281845316290855,
-0.010764850303530693,
0.08963917940855026,
-0.08784197270870209,
-0.024805793538689613,
-0.007865003310143948,
-0.16034314036369324,
-0.0786941647529602,
0.02743527851998806,
0.12358517944812775,
0.03435857966542244,
0.10351435095071793,
-0.07957713305950165,
-0.07773204147815704,
-0.044916946440935135,
0.04835086688399315,
-0.17917990684509277,
0.029337046667933464,
0.1290050745010376,
-0.01205289177596569,
-0.05018633231520653,
-0.011104144155979156,
0.13940434157848358,
0.1414412558078766,
0.03720450773835182,
-0.07158173620700836,
0.11174292862415314,
0.04509170725941658,
0.05615699291229248,
0.06729190051555634,
0.06824616342782974,
-0.010835218243300915,
-0.07639627903699875,
0.048860102891922,
-0.09003977477550507,
-0.0107348021119833,
-0.006185430102050304,
-0.00547926826402545,
-0.11470950394868851,
-0.02178056538105011,
-0.08514660596847534,
0.05238239839673042,
-0.0013329617213457823,
-0.07882911711931229,
-0.0019113706657662988,
-0.011034877970814705,
0.03387365862727165,
0.03325425088405609,
0.0427929162979126,
-0.09824911504983902,
-0.09143660962581635,
-0.018190063536167145,
0.01241137646138668,
0.009273039177060127,
-0.09495626389980316,
0.03463121876120567,
-0.05797513201832771,
0.007553362753242254,
-0.062358733266592026,
0.08856412768363953,
0.06509972363710403,
0.005030693020671606,
-0.008778315968811512,
-0.07764620333909988,
-0.03657788410782814,
0.07676412165164948,
-0.18710559606552124,
-0.08209465444087982
] |
null | null |
transformers
|
IndicBART is a multilingual, sequence-to-sequence pre-trained model focusing on Indic languages and English. It currently supports 11 Indian languages and is based on the mBART architecture. You can use the IndicBART model to build natural language generation applications for Indian languages by fine-tuning the model with supervised training data for tasks like machine translation, summarization, and question generation. Some salient features of IndicBART are:
<ul>
<li >Supported languages: Assamese, Bengali, Gujarati, Hindi, Marathi, Odiya, Punjabi, Kannada, Malayalam, Tamil, Telugu and English. Not all of these languages are supported by mBART50 and mT5. </li>
<li> The model is much smaller than the mBART and mT5(-base) models, and is therefore less computationally expensive to fine-tune and decode. </li>
<li> Trained on large Indic language corpora (452 million sentences and 9 billion tokens) which also includes Indian English content. </li>
<li> All languages, except English, have been represented in Devanagari script to encourage transfer learning among the related languages. </li>
</ul>
You can read more about IndicBART in this <a href="https://arxiv.org/abs/2109.02903">paper</a>.
For detailed documentation, look here: https://github.com/AI4Bharat/indic-bart/ and https://indicnlp.ai4bharat.org/indic-bart/
# Pre-training corpus
We used the <a href="https://indicnlp.ai4bharat.org/corpora/">IndicCorp</a> data spanning 12 languages with 452 million sentences (9 billion tokens). The model was trained using the text-infilling objective used in mBART.
# Usage:
```
from transformers import MBartForConditionalGeneration, AutoModelForSeq2SeqLM
from transformers import AlbertTokenizer, AutoTokenizer
tokenizer = AutoTokenizer.from_pretrained("ai4bharat/IndicBART", do_lower_case=False, use_fast=False, keep_accents=True)
# Or use tokenizer = AlbertTokenizer.from_pretrained("ai4bharat/IndicBART", do_lower_case=False, use_fast=False, keep_accents=True)
model = AutoModelForSeq2SeqLM.from_pretrained("ai4bharat/IndicBART")
# Or use model = MBartForConditionalGeneration.from_pretrained("ai4bharat/IndicBART")
# Some initial mapping
bos_id = tokenizer._convert_token_to_id_with_added_voc("<s>")
eos_id = tokenizer._convert_token_to_id_with_added_voc("</s>")
pad_id = tokenizer._convert_token_to_id_with_added_voc("<pad>")
# To get lang_id use any of ['<2as>', '<2bn>', '<2en>', '<2gu>', '<2hi>', '<2kn>', '<2ml>', '<2mr>', '<2or>', '<2pa>', '<2ta>', '<2te>']
# First tokenize the input and outputs. The format below is how IndicBART was trained so the input should be "Sentence </s> <2xx>" where xx is the language code. Similarly, the output should be "<2yy> Sentence </s>".
inp = tokenizer("I am a boy </s> <2en>", add_special_tokens=False, return_tensors="pt", padding=True).input_ids # tensor([[ 466, 1981, 80, 25573, 64001, 64004]])
out = tokenizer("<2hi> मैं एक लड़का हूँ </s>", add_special_tokens=False, return_tensors="pt", padding=True).input_ids # tensor([[64006, 942, 43, 32720, 8384, 64001]])
# Note that if you use any language other than Hindi or Marathi, you should convert its script to Devanagari using the Indic NLP Library.
model_outputs=model(input_ids=inp, decoder_input_ids=out[:,0:-1], labels=out[:,1:])
# For loss
model_outputs.loss ## This is not label smoothed.
# For logits
model_outputs.logits
# For generation. Pardon the messiness. Note the decoder_start_token_id.
model.eval() # Set dropouts to zero
model_output=model.generate(inp, use_cache=True, num_beams=4, max_length=20, min_length=1, early_stopping=True, pad_token_id=pad_id, bos_token_id=bos_id, eos_token_id=eos_id, decoder_start_token_id=tokenizer._convert_token_to_id_with_added_voc("<2en>"))
# Decode to get output strings
decoded_output=tokenizer.decode(model_output[0], skip_special_tokens=True, clean_up_tokenization_spaces=False)
print(decoded_output) # I am a boy
# Note that if your output language is not Hindi or Marathi, you should convert its script from Devanagari to the desired language using the Indic NLP Library.
# What if we mask?
inp = tokenizer("I am [MASK] </s> <2en>", add_special_tokens=False, return_tensors="pt", padding=True).input_ids
model_output=model.generate(inp, use_cache=True, num_beams=4, max_length=20, min_length=1, early_stopping=True, pad_token_id=pad_id, bos_token_id=bos_id, eos_token_id=eos_id, decoder_start_token_id=tokenizer._convert_token_to_id_with_added_voc("<2en>"))
decoded_output=tokenizer.decode(model_output[0], skip_special_tokens=True, clean_up_tokenization_spaces=False)
print(decoded_output) # I am happy
inp = tokenizer("मैं [MASK] हूँ </s> <2hi>", add_special_tokens=False, return_tensors="pt", padding=True).input_ids
model_output=model.generate(inp, use_cache=True, num_beams=4, max_length=20, min_length=1, early_stopping=True, pad_token_id=pad_id, bos_token_id=bos_id, eos_token_id=eos_id, decoder_start_token_id=tokenizer._convert_token_to_id_with_added_voc("<2en>"))
decoded_output=tokenizer.decode(model_output[0], skip_special_tokens=True, clean_up_tokenization_spaces=False)
print(decoded_output) # मैं जानता हूँ
inp = tokenizer("मला [MASK] पाहिजे </s> <2mr>", add_special_tokens=False, return_tensors="pt", padding=True).input_ids
model_output=model.generate(inp, use_cache=True, num_beams=4, max_length=20, min_length=1, early_stopping=True, pad_token_id=pad_id, bos_token_id=bos_id, eos_token_id=eos_id, decoder_start_token_id=tokenizer._convert_token_to_id_with_added_voc("<2en>"))
decoded_output=tokenizer.decode(model_output[0], skip_special_tokens=True, clean_up_tokenization_spaces=False)
print(decoded_output) # मला ओळखलं पाहिजे
```
# Notes:
1. This is compatible with the latest version of transformers but was developed with version 4.3.2 so consider using 4.3.2 if possible.
2. While I have only shown how to get logits and loss and how to generate outputs, you can do pretty much everything the MBartForConditionalGeneration class can do as in https://huggingface.co/docs/transformers/model_doc/mbart#transformers.MBartForConditionalGeneration
3. Note that the tokenizer I have used is based on sentencepiece and not BPE. Therefore, I used the AlbertTokenizer class and not the MBartTokenizer class.
4. If you wish to use any language written in a non-Devanagari script (except English), then you should first convert it to Devanagari using the <a href="https://github.com/anoopkunchukuttan/indic_nlp_library">Indic NLP Library</a>. After you get the output, you should convert it back into the original script.
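A minimal sketch of the script conversion in note 4, assuming the `indic-nlp-library` package and its `UnicodeIndicTransliterator` helper; the Tamil example string is illustrative only:
```
# pip install indic-nlp-library   (assumption: PyPI package name; some versions also
# require the indic_nlp_resources path to be configured)
from indicnlp.transliterate.unicode_transliterate import UnicodeIndicTransliterator

tamil_text = "நான் ஒரு பையன்"  # "I am a boy" in Tamil script (illustrative)

# Convert Tamil script -> Devanagari before building the "Sentence </s> <2ta>" input ...
devanagari_text = UnicodeIndicTransliterator.transliterate(tamil_text, "ta", "hi")

# ... and convert the model's Devanagari output back to the original script afterwards.
back_to_tamil = UnicodeIndicTransliterator.transliterate(devanagari_text, "hi", "ta")
print(devanagari_text)
print(back_to_tamil)
```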
# Fine-tuning on a downstream task
1. If you wish to fine-tune this model, then you can do so using the <a href="https://github.com/prajdabre/yanmtt">YANMTT</a> toolkit, following the instructions <a href="https://github.com/AI4Bharat/indic-bart">here</a>.
2. (Untested) Alternatively, you may use the official huggingface scripts for <a href="https://github.com/huggingface/transformers/tree/master/examples/pytorch/translation">translation</a> and <a href="https://github.com/huggingface/transformers/tree/master/examples/pytorch/summarization">summarization</a>.
# Contributors
<ul>
<li> Raj Dabre </li>
<li> Himani Shrotriya </li>
<li> Anoop Kunchukuttan </li>
<li> Ratish Puduppully </li>
<li> Mitesh M. Khapra </li>
<li> Pratyush Kumar </li>
</ul>
# Paper
If you use IndicBART, please cite the following paper:
```
@misc{dabre2021indicbart,
title={IndicBART: A Pre-trained Model for Natural Language Generation of Indic Languages},
author={Raj Dabre and Himani Shrotriya and Anoop Kunchukuttan and Ratish Puduppully and Mitesh M. Khapra and Pratyush Kumar},
year={2021},
eprint={2109.02903},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
# License
The model is available under the MIT License.
|
{"language": ["as", "bn", "gu", "hi", "kn", "ml", "mr", "or", "pa", "ta", "te"], "tags": ["multilingual", "nlp", "indicnlp"]}
|
text2text-generation
|
ai4bharat/IndicBART
|
[
"transformers",
"pytorch",
"mbart",
"text2text-generation",
"multilingual",
"nlp",
"indicnlp",
"as",
"bn",
"gu",
"hi",
"kn",
"ml",
"mr",
"or",
"pa",
"ta",
"te",
"arxiv:2109.02903",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2109.02903"
] |
[
"as",
"bn",
"gu",
"hi",
"kn",
"ml",
"mr",
"or",
"pa",
"ta",
"te"
] |
TAGS
#transformers #pytorch #mbart #text2text-generation #multilingual #nlp #indicnlp #as #bn #gu #hi #kn #ml #mr #or #pa #ta #te #arxiv-2109.02903 #autotrain_compatible #endpoints_compatible #region-us
|
IndicBART is a multilingual, sequence-to-sequence pre-trained model focusing on Indic languages and English. It currently supports 11 Indian languages and is based on the mBART architecture. You can use IndicBART model to build natural language generation applications for Indian languages by finetuning the model with supervised training data for tasks like machine translation, summarization, question generation, etc. Some salient features of the IndicBART are:
<ul>
<li >Supported languages: Assamese, Bengali, Gujarati, Hindi, Marathi, Odiya, Punjabi, Kannada, Malayalam, Tamil, Telugu and English. Not all of these languages are supported by mBART50 and mT5. </li>
<li >The model is much smaller than the mBART and mT5(-base) models, so less computationally expensive for finetuning and decoding. </li>
<li> Trained on large Indic language corpora (452 million sentences and 9 billion tokens) which also includes Indian English content. </li>
<li> All languages, except English, have been represented in Devanagari script to encourage transfer learning among the related languages. </li>
</ul>
You can read more about IndicBART in this <a href="URL
For detailed documentation, look here: URL and URL
# Pre-training corpus
We used the <a href="URL data spanning 12 languages with 452 million sentences (9 billion tokens). The model was trained using the text-infilling objective used in mBART.
# Usage:
# Notes:
1. This is compatible with the latest version of transformers but was developed with version 4.3.2 so consider using 4.3.2 if possible.
2. While I have only shown how to get logits and loss and how to generate outputs, you can do pretty much everything the MBartForConditionalGeneration class can do as in URL
3. Note that the tokenizer I have used is based on sentencepiece and not BPE. Therefore, I used the AlbertTokenizer class and not the MBartTokenizer class.
4. If you wish to use any language written in a non-Devanagari script (except English), then you should first convert it to Devanagari using the <a href="URL NLP Library</a>. After you get the output, you should convert it back into the original script.
# Fine-tuning on a downstream task
1. If you wish to fine-tune this model, then you can do so using the <a href="URL toolkit, following the instructions <a href="URL ">here</a>.
2. (Untested) Alternatively, you may use the official huggingface scripts for <a href="URL and <a href="URL
# Contributors
<ul>
<li> Raj Dabre </li>
<li> Himani Shrotriya </li>
<li> Anoop Kunchukuttan </li>
<li> Ratish Puduppully </li>
<li> Mitesh M. Khapra </li>
<li> Pratyush Kumar </li>
</ul>
# Paper
If you use IndicBART, please cite the following paper:
# License
The model is available under the MIT License.
|
[
"# Pre-training corpus\n\nWe used the <a href=\"URL data spanning 12 languages with 452 million sentences (9 billion tokens). The model was trained using the text-infilling objective used in mBART.",
"# Usage:",
"# Notes:\n1. This is compatible with the latest version of transformers but was developed with version 4.3.2 so consider using 4.3.2 if possible.\n2. While I have only shown how to get logits and loss and how to generate outputs, you can do pretty much everything the MBartForConditionalGeneration class can do as in URL\n3. Note that the tokenizer I have used is based on sentencepiece and not BPE. Therefore, I used the AlbertTokenizer class and not the MBartTokenizer class.\n4. If you wish to use any language written in a non-Devanagari script (except English), then you should first convert it to Devanagari using the <a href=\"URL NLP Library</a>. After you get the output, you should convert it back into the original script.",
"# Fine-tuning on a downstream task\n\n1. If you wish to fine-tune this model, then you can do so using the <a href=\"URL toolkit, following the instructions <a href=\"URL \">here</a>.\n2. (Untested) Alternatively, you may use the official huggingface scripts for <a href=\"URL and <a href=\"URL",
"# Contributors\n<ul>\n<li> Raj Dabre </li>\n<li> Himani Shrotriya </li>\n<li> Anoop Kunchukuttan </li>\n<li> Ratish Puduppully </li>\n<li> Mitesh M. Khapra </li>\n<li> Pratyush Kumar </li>\n</ul>",
"# Paper\nIf you use IndicBART, please cite the following paper:",
"# License\nThe model is available under the MIT License."
] |
[
"TAGS\n#transformers #pytorch #mbart #text2text-generation #multilingual #nlp #indicnlp #as #bn #gu #hi #kn #ml #mr #or #pa #ta #te #arxiv-2109.02903 #autotrain_compatible #endpoints_compatible #region-us \n",
"# Pre-training corpus\n\nWe used the <a href=\"URL data spanning 12 languages with 452 million sentences (9 billion tokens). The model was trained using the text-infilling objective used in mBART.",
"# Usage:",
"# Notes:\n1. This is compatible with the latest version of transformers but was developed with version 4.3.2 so consider using 4.3.2 if possible.\n2. While I have only shown how to get logits and loss and how to generate outputs, you can do pretty much everything the MBartForConditionalGeneration class can do as in URL\n3. Note that the tokenizer I have used is based on sentencepiece and not BPE. Therefore, I used the AlbertTokenizer class and not the MBartTokenizer class.\n4. If you wish to use any language written in a non-Devanagari script (except English), then you should first convert it to Devanagari using the <a href=\"URL NLP Library</a>. After you get the output, you should convert it back into the original script.",
"# Fine-tuning on a downstream task\n\n1. If you wish to fine-tune this model, then you can do so using the <a href=\"URL toolkit, following the instructions <a href=\"URL \">here</a>.\n2. (Untested) Alternatively, you may use the official huggingface scripts for <a href=\"URL and <a href=\"URL",
"# Contributors\n<ul>\n<li> Raj Dabre </li>\n<li> Himani Shrotriya </li>\n<li> Anoop Kunchukuttan </li>\n<li> Ratish Puduppully </li>\n<li> Mitesh M. Khapra </li>\n<li> Pratyush Kumar </li>\n</ul>",
"# Paper\nIf you use IndicBART, please cite the following paper:",
"# License\nThe model is available under the MIT License."
] |
[
81,
48,
4,
174,
86,
83,
16,
11
] |
[
"passage: TAGS\n#transformers #pytorch #mbart #text2text-generation #multilingual #nlp #indicnlp #as #bn #gu #hi #kn #ml #mr #or #pa #ta #te #arxiv-2109.02903 #autotrain_compatible #endpoints_compatible #region-us \n# Pre-training corpus\n\nWe used the <a href=\"URL data spanning 12 languages with 452 million sentences (9 billion tokens). The model was trained using the text-infilling objective used in mBART.# Usage:# Notes:\n1. This is compatible with the latest version of transformers but was developed with version 4.3.2 so consider using 4.3.2 if possible.\n2. While I have only shown how to get logits and loss and how to generate outputs, you can do pretty much everything the MBartForConditionalGeneration class can do as in URL\n3. Note that the tokenizer I have used is based on sentencepiece and not BPE. Therefore, I used the AlbertTokenizer class and not the MBartTokenizer class.\n4. If you wish to use any language written in a non-Devanagari script (except English), then you should first convert it to Devanagari using the <a href=\"URL NLP Library</a>. After you get the output, you should convert it back into the original script.# Fine-tuning on a downstream task\n\n1. If you wish to fine-tune this model, then you can do so using the <a href=\"URL toolkit, following the instructions <a href=\"URL \">here</a>.\n2. (Untested) Alternatively, you may use the official huggingface scripts for <a href=\"URL and <a href=\"URL# Contributors\n<ul>\n<li> Raj Dabre </li>\n<li> Himani Shrotriya </li>\n<li> Anoop Kunchukuttan </li>\n<li> Ratish Puduppully </li>\n<li> Mitesh M. Khapra </li>\n<li> Pratyush Kumar </li>\n</ul># Paper\nIf you use IndicBART, please cite the following paper:# License\nThe model is available under the MIT License."
] |
[
-0.041750553995370865,
0.14993681013584137,
-0.007560583297163248,
0.023248303681612015,
0.08347660303115845,
0.012359959073364735,
0.07984043657779694,
0.11423341929912567,
-0.0025356761179864407,
0.08031901717185974,
0.04944482073187828,
0.06865349411964417,
0.08491580933332443,
0.0644075945019722,
0.06683051586151123,
-0.2248302549123764,
0.0053130146116018295,
-0.06321264058351517,
0.061597615480422974,
0.09104020893573761,
0.09629423171281815,
-0.04516509920358658,
0.04459988325834274,
-0.004925548564642668,
-0.016911154612898827,
0.04004340246319771,
-0.047321297228336334,
-0.009094747714698315,
0.027837948873639107,
0.053738318383693695,
0.04588941112160683,
0.011878110468387604,
0.05538150295615196,
-0.21463055908679962,
0.021502090618014336,
0.04957164451479912,
-0.003999921027570963,
0.022814905270934105,
0.0897575244307518,
-0.06644340604543686,
0.13729238510131836,
-0.0637737363576889,
-0.006175100803375244,
0.03613413870334625,
-0.0940404161810875,
-0.10014093667268753,
-0.09549697488546371,
0.1545533537864685,
0.10544686764478683,
0.041559237986803055,
-0.04831825569272041,
0.011014050804078579,
0.051765602082014084,
0.07046373933553696,
0.1480521410703659,
-0.20545314252376556,
-0.03412798047065735,
0.013940981589257717,
0.07334891706705093,
0.0677228644490242,
-0.055556587874889374,
-0.008635253645479679,
0.015371224842965603,
0.007836527191102505,
0.011534411460161209,
-0.052584338933229446,
0.10592067241668701,
0.01143932156264782,
-0.1088465228676796,
-0.017981266602873802,
0.087763212621212,
-0.022273795679211617,
-0.06449993699789047,
-0.09722647815942764,
-0.03593840450048447,
-0.003432100173085928,
0.03520685061812401,
-0.008671054616570473,
0.01559403631836176,
0.025209348648786545,
0.04908543452620506,
-0.14219029247760773,
-0.0964646264910698,
-0.012477741576731205,
0.02311943843960762,
0.12203056365251541,
0.04216665029525757,
-0.018270183354616165,
0.04234432056546211,
0.0927232950925827,
-0.03113279491662979,
-0.10991650819778442,
-0.06637710332870483,
-0.05977809429168701,
-0.014677770435810089,
0.00454437592998147,
-0.029278120025992393,
-0.08323244750499725,
0.01478957012295723,
0.20651540160179138,
-0.000049838712584460154,
0.05739371478557587,
-0.004638938698917627,
0.007975228130817413,
0.04779857024550438,
0.12241297960281372,
-0.14092987775802612,
-0.03628094866871834,
0.033336788415908813,
0.019129548221826553,
0.04641266539692879,
0.007109158206731081,
-0.036202508956193924,
-0.015172010287642479,
0.0021868080366402864,
0.090581513941288,
0.014077220112085342,
0.01614142209291458,
-0.02774677984416485,
-0.02913757599890232,
0.09418682008981705,
-0.17957623302936554,
0.030114177614450455,
0.03289921581745148,
-0.03341683745384216,
0.08247219026088715,
0.08984971046447754,
-0.012658471241593361,
-0.10377270728349686,
0.01970958523452282,
-0.017708083614706993,
0.017336804419755936,
-0.11342453956604004,
-0.11258547008037567,
0.003977946005761623,
-0.04079294949769974,
-0.053533945232629776,
-0.09382074326276779,
-0.13693177700042725,
-0.03878719359636307,
0.07737459242343903,
-0.06374897062778473,
0.06933232396841049,
-0.04302978515625,
0.003966280724853277,
-0.02572326362133026,
-0.016486117616295815,
-0.028778115287423134,
-0.008475139737129211,
0.03354177251458168,
-0.015471112914383411,
0.0773722305893898,
0.05062693729996681,
0.03465786576271057,
-0.048045843839645386,
0.05149225890636444,
-0.1518072634935379,
0.10954354703426361,
-0.04558412730693817,
-0.020538972690701485,
-0.1157364547252655,
-0.032536499202251434,
0.009478362277150154,
0.006830933503806591,
0.06942938268184662,
0.07491891086101532,
-0.12328112870454788,
-0.016185207292437553,
0.15676507353782654,
-0.0849207267165184,
-0.0363539420068264,
0.07039401680231094,
-0.010497725568711758,
0.08290562778711319,
0.06750883907079697,
0.07317327708005905,
0.10017792135477066,
-0.06131208688020706,
-0.06196309253573418,
-0.012160019017755985,
-0.00447862409055233,
0.10375693440437317,
0.0633445754647255,
-0.013322376646101475,
0.11615748703479767,
0.009731877595186234,
-0.10293734818696976,
0.007586187683045864,
0.01712440513074398,
-0.04708603397011757,
0.022908566519618034,
-0.017729129642248154,
-0.040308039635419846,
-0.020799711346626282,
-0.03355461731553078,
0.02664419263601303,
-0.12460962682962418,
0.10174711048603058,
0.08111424744129181,
-0.1114625483751297,
0.05139720439910889,
-0.07265890389680862,
0.07574571669101715,
-0.05448680371046066,
0.019667506217956543,
-0.13930122554302216,
0.07284748554229736,
0.01644132472574711,
-0.1210046336054802,
0.08070743083953857,
-0.002339001977816224,
0.04274291545152664,
0.09355293959379196,
-0.034201446920633316,
-0.027770638465881348,
0.03209758177399635,
-0.011310646310448647,
0.024540409445762634,
-0.1436261683702469,
-0.05629824101924896,
-0.04259698465466499,
0.13286544382572174,
-0.09062857925891876,
0.02325606904923916,
0.04337889328598976,
0.1364125907421112,
-0.03138906881213188,
-0.02771903946995735,
0.04159482195973396,
-0.006460695993155241,
-0.031330421566963196,
-0.05942615121603012,
0.021612005308270454,
0.016622478142380714,
-0.05472537502646446,
0.08132599294185638,
-0.12567049264907837,
-0.24844691157341003,
0.06453531235456467,
0.00016973615856841207,
-0.052896954119205475,
0.03660230711102486,
-0.003852046560496092,
-0.0358206108212471,
-0.01469056773930788,
-0.052406907081604004,
0.1621367186307907,
0.07819210737943649,
0.11424411833286285,
-0.09400013089179993,
-0.06992532312870026,
-0.05758656561374664,
-0.082729771733284,
-0.025266801938414574,
0.06883341819047928,
0.002337103011086583,
-0.14486631751060486,
0.039411067962646484,
0.019120709970593452,
-0.053735360503196716,
0.16861575841903687,
0.03998441621661186,
-0.09426455199718475,
-0.057743389159440994,
0.08404248207807541,
0.025365881621837616,
-0.012401889078319073,
-0.012621250003576279,
-0.0026305890642106533,
0.029348008334636688,
-0.03669814392924309,
0.02711370214819908,
-0.08156216889619827,
0.058678384870290756,
0.010406315326690674,
-0.038758233189582825,
0.04475754126906395,
0.012843625620007515,
-0.029357630759477615,
0.03011993132531643,
0.01733248680830002,
0.13458646833896637,
-0.03785881772637367,
-0.039703819900751114,
-0.11314655095338821,
0.13680481910705566,
-0.11770898103713989,
-0.20018449425697327,
-0.18251001834869385,
-0.04809004068374634,
-0.0660238042473793,
-0.010835827328264713,
0.05101126804947853,
-0.04883282631635666,
-0.06168457865715027,
-0.06764386594295502,
0.03932201862335205,
0.01395443920046091,
-0.10975669324398041,
-0.06814207136631012,
0.0064781769178807735,
0.011342732235789299,
-0.1344766467809677,
-0.004023019690066576,
0.07593598961830139,
-0.06246308237314224,
0.026979293674230576,
0.020225562155246735,
0.05304993316531181,
0.05973541736602783,
-0.022180402651429176,
-0.00553804961964488,
0.03223308548331261,
0.10136087238788605,
-0.044459640979766846,
0.10162939131259918,
0.09473087638616562,
-0.0504620186984539,
0.09028936177492142,
0.09364111721515656,
0.010874113999307156,
-0.005507334601134062,
0.007883609272539616,
-0.0005630938103422523,
-0.01101895421743393,
-0.2331933081150055,
0.017348354682326317,
-0.028083493933081627,
-0.03396221250295639,
0.08476368337869644,
0.02639269270002842,
-0.05124959349632263,
0.05281655117869377,
-0.07579046487808228,
0.038222964853048325,
0.044057879596948624,
0.12972071766853333,
0.09143710881471634,
-0.016118159517645836,
0.07057066261768341,
-0.04396364092826843,
0.02871832437813282,
0.07486215978860855,
0.038145266473293304,
0.1544959843158722,
-0.05275639146566391,
0.17285646498203278,
0.10178367793560028,
0.04364863783121109,
-0.01999090611934662,
0.07363523542881012,
-0.06990913301706314,
0.07529893517494202,
-0.0061557842418551445,
-0.10732043534517288,
-0.021181371062994003,
0.09375406056642532,
0.02472800388932228,
0.017749402672052383,
0.06547746807336807,
-0.012258393689990044,
0.07775642722845078,
0.18228086829185486,
0.027338596060872078,
-0.17033593356609344,
-0.014868821948766708,
0.045180704444646835,
-0.042855363339185715,
-0.0556153729557991,
-0.03428589925169945,
0.02676805853843689,
-0.07747772336006165,
0.07950906455516815,
-0.020185580477118492,
0.06667640060186386,
-0.08920048922300339,
-0.04961664602160454,
0.03398100286722183,
0.0879460945725441,
0.01169742550700903,
0.10297315567731857,
-0.14229239523410797,
0.06794165819883347,
0.021973708644509315,
0.09109654277563095,
-0.022204216569662094,
0.052416153252124786,
0.03350158780813217,
-0.04148627072572708,
0.08073187619447708,
0.002348959678784013,
0.07127352058887482,
-0.04549744352698326,
-0.045856449753046036,
-0.0013274761149659753,
0.09535779058933258,
-0.06927812099456787,
0.1104612797498703,
-0.020675746724009514,
-0.02863580733537674,
-0.07768697291612625,
0.011651577427983284,
-0.13901087641716003,
-0.14562182128429413,
0.09093361347913742,
-0.08666561543941498,
0.09543391317129135,
-0.01815500669181347,
-0.024854430928826332,
-0.011041165329515934,
0.19081149995326996,
-0.1817096620798111,
-0.12785273790359497,
-0.05794621631503105,
0.013708306476473808,
0.12566331028938293,
-0.08341299742460251,
0.044326648116111755,
-0.05831116810441017,
0.08574527502059937,
-0.03957497701048851,
-0.03343743830919266,
-0.035229019820690155,
-0.0502241887152195,
-0.11157890409231186,
0.01919371634721756,
0.10095726698637009,
0.035774823278188705,
-0.005842008627951145,
0.012794854119420052,
0.04880685731768608,
-0.04629164934158325,
-0.11616132408380508,
-0.0738789513707161,
0.09348766505718231,
0.03574037924408913,
0.03203130513429642,
-0.11266487836837769,
-0.06064080446958542,
-0.10084321349859238,
-0.052718356251716614,
0.039469681680202484,
0.22566142678260803,
-0.03785359486937523,
0.07955308258533478,
0.26188167929649353,
-0.11303643137216568,
-0.16151337325572968,
-0.12285619974136353,
0.03134623542428017,
0.029815975576639175,
-0.014111371710896492,
-0.1888880729675293,
0.04948078468441963,
0.05887746438384056,
0.027234124019742012,
-0.00861597340553999,
-0.17277783155441284,
-0.12180114537477493,
0.031032636761665344,
0.08225353807210922,
-0.08807191252708435,
-0.1536889374256134,
-0.08616887778043747,
-0.082138791680336,
-0.10009509325027466,
0.0025419634766876698,
-0.05471570044755936,
0.09709086269140244,
-0.013374004513025284,
0.054289087653160095,
0.01820308156311512,
-0.052612483501434326,
0.13891716301441193,
-0.02687559276819229,
-0.009772175922989845,
-0.08607643842697144,
0.018234120681881905,
0.10867199301719666,
-0.05401710420846939,
0.16646406054496765,
-0.08084538578987122,
0.03763194754719734,
-0.06640379130840302,
-0.04653671011328697,
-0.03580525144934654,
0.0034781356807798147,
-0.05203421786427498,
-0.0640861764550209,
-0.04098841920495033,
0.059491779655218124,
0.06596790999174118,
0.00015336474461946636,
-0.04686933383345604,
-0.04787338152527809,
-0.034049469977617264,
0.14902430772781372,
0.10746103525161743,
0.0506235770881176,
0.0005227470537647605,
-0.02588237076997757,
-0.030148372054100037,
0.033953480422496796,
-0.11341329663991928,
0.017823461443185806,
0.07381513714790344,
0.052668247371912,
0.1465553492307663,
-0.017784543335437775,
-0.15811924636363983,
0.007275589741766453,
0.08432485163211823,
-0.10660268366336823,
-0.09606728702783585,
0.0037793719675391912,
0.019403882324695587,
-0.07611633092164993,
-0.06750238686800003,
0.16321592032909393,
-0.055057644844055176,
-0.03277476131916046,
0.04629063606262207,
0.0461488775908947,
-0.040565114468336105,
0.07045667618513107,
0.0615219920873642,
0.01087760180234909,
-0.028412938117980957,
0.09800262749195099,
0.07397648692131042,
-0.03832557052373886,
0.007849391549825668,
0.14532873034477234,
-0.06914450973272324,
-0.09245019406080246,
-0.1037377193570137,
0.03466953709721565,
-0.04496011510491371,
-0.009293001145124435,
-0.005433051381260157,
0.02097260020673275,
0.007701264228671789,
0.0530974343419075,
0.007217307109385729,
0.01602303236722946,
-0.03530225530266762,
0.03138771280646324,
-0.06492428481578827,
0.13744713366031647,
0.09129773825407028,
0.01429604459553957,
-0.011263680644333363,
0.054437801241874695,
0.0390126071870327,
-0.015486170537769794,
0.005096886307001114,
-0.02874363586306572,
-0.05340512841939926,
-0.007579319179058075,
-0.16769932210445404,
0.027279917150735855,
-0.11823558807373047,
-0.02720196731388569,
-0.005687206517904997,
0.005225840024650097,
-0.019581766799092293,
0.021448029205203056,
-0.015461166389286518,
-0.07227025926113129,
-0.067838154733181,
0.06407760083675385,
-0.17654788494110107,
-0.02485264651477337,
0.04433557763695717,
-0.08844853192567825,
0.048397500067949295,
0.03600607439875603,
-0.03891047090291977,
0.01131467055529356,
-0.03727275878190994,
-0.002583864377811551,
-0.03965293616056442,
0.030545346438884735,
-0.01326738204807043,
-0.19144931435585022,
0.030445026233792305,
0.0015366834122687578,
-0.007050982676446438,
-0.01519242487847805,
0.08925875276327133,
-0.09141283482313156,
0.06311384588479996,
-0.008158792741596699,
-0.004274841398000717,
-0.09599427878856659,
0.09002120047807693,
0.04262971505522728,
0.07670305669307709,
0.11314384639263153,
-0.06628426909446716,
0.05823289975523949,
-0.08594175428152084,
-0.0012366990558803082,
-0.0007673674263060093,
-0.0031775562092661858,
-0.021848579868674278,
-0.06689534336328506,
0.004486124496906996,
-0.043277859687805176,
0.09314258396625519,
-0.04485287144780159,
-0.00469062989577651,
0.03822117671370506,
-0.03844773769378662,
-0.09289731085300446,
0.020570939406752586,
0.09495899081230164,
-0.00796423852443695,
0.005240466445684433,
-0.12795761227607727,
0.013913135975599289,
-0.06207986921072006,
-0.052947841584682465,
0.016275480389595032,
0.15545205771923065,
0.11672934144735336,
0.049471884965896606,
0.03477238863706589,
-0.02192796766757965,
-0.086133673787117,
0.01209340337663889,
-0.024690838530659676,
0.041012153029441833,
-0.06292333453893661,
0.15641064941883087,
0.20179015398025513,
-0.15742893517017365,
0.06096410006284714,
0.014714404940605164,
-0.06085687875747681,
-0.052442558109760284,
-0.17463864386081696,
-0.04112713038921356,
-0.00021685169485863298,
0.004602709785103798,
-0.10909685492515564,
0.09404846280813217,
-0.009661750867962837,
0.043435074388980865,
0.000060863101680297405,
0.1185416728258133,
0.008927329443395138,
-0.13321350514888763,
0.04155801981687546,
0.018373647704720497,
0.046360619366168976,
0.05924490466713905,
0.04513340815901756,
0.015579882077872753,
-0.011664758436381817,
0.057132769376039505,
0.09890009462833405,
0.0655864030122757,
-0.015278720296919346,
-0.05800788849592209,
-0.08048319071531296,
0.009566230699419975,
0.046459417790174484,
0.010009592399001122,
0.18226836621761322,
0.03455473855137825,
-0.05052482336759567,
0.003969219513237476,
0.09071853011846542,
-0.04150853306055069,
-0.13513879477977753,
-0.1085771769285202,
0.13607636094093323,
0.04628083482384682,
0.010922009125351906,
0.009511497803032398,
-0.11094903945922852,
0.011381178162992,
0.1262090802192688,
0.06296593695878983,
0.008912707678973675,
0.0015814179787412286,
0.058072611689567566,
0.025583522394299507,
-0.02103222906589508,
0.038390759378671646,
0.054167333990335464,
0.14384539425373077,
-0.050379473716020584,
0.09063783288002014,
-0.04002002626657486,
-0.04896757751703262,
-0.0899592861533165,
0.06838132441043854,
-0.06950779259204865,
-0.035389918833971024,
-0.028410300612449646,
0.08316127210855484,
-0.016106806695461273,
-0.22047379612922668,
-0.0017994436202570796,
-0.0027719305362552404,
-0.07868199795484543,
-0.006864838767796755,
0.03149981051683426,
-0.0007315344992093742,
0.04326344653964043,
0.02626711316406727,
-0.014807679690420628,
0.24090217053890228,
0.03201299533247948,
-0.045514367520809174,
0.002240787958726287,
0.0666261613368988,
-0.06477146595716476,
0.07574334740638733,
0.001144173787906766,
0.09226210415363312,
0.10559283941984177,
0.02623171918094158,
-0.09412739425897598,
0.07004117965698242,
0.05960053950548172,
-0.06923055648803711,
0.05412157252430916,
0.15120984613895416,
-0.012705656699836254,
0.02792171575129032,
0.0428183451294899,
0.007138346321880817,
0.05309818685054779,
0.07683705538511276,
0.003430240321904421,
-0.1001812219619751,
0.0895516648888588,
-0.08753728121519089,
0.11670085787773132,
0.13966457545757294,
-0.0355038084089756,
-0.017525000497698784,
-0.05633311718702316,
0.027994688600301743,
0.027559703215956688,
0.08879206329584122,
0.01130649633705616,
-0.12910030782222748,
-0.0021667343098670244,
-0.003790108021348715,
0.12688085436820984,
-0.23910966515541077,
-0.05055234581232071,
0.03019285388290882,
0.003732208861038089,
-0.04850189387798309,
0.14690792560577393,
0.009885207749903202,
0.03651364520192146,
0.0058494629338383675,
-0.13852985203266144,
-0.01574193499982357,
0.061133481562137604,
-0.1589159518480301,
-0.055984873324632645
] |
null | null |
transformers
|
# IndicBERT
IndicBERT is a multilingual ALBERT model pretrained exclusively on 12 major Indian languages. It is pre-trained on our novel monolingual corpus of around 9 billion tokens and subsequently evaluated on a set of diverse tasks. IndicBERT has far fewer parameters than other multilingual models (mBERT, XLM-R etc.) while achieving performance on par with or better than these models.
The 12 languages covered by IndicBERT are: Assamese, Bengali, English, Gujarati, Hindi, Kannada, Malayalam, Marathi, Oriya, Punjabi, Tamil, Telugu.
The code can be found [here](https://github.com/divkakwani/indic-bert). For more information, check out our [project page](https://indicnlp.ai4bharat.org/) or our [paper](https://indicnlp.ai4bharat.org/papers/arxiv2020_indicnlp_corpus.pdf).
## Pretraining Corpus
We pre-trained indic-bert on AI4Bharat's monolingual corpus. The corpus has the following distribution of languages:
| Language | as | bn | en | gu | hi | kn | |
| ----------------- | ------ | ------ | ------ | ------ | ------ | ------ | ------- |
| **No. of Tokens** | 36.9M | 815M | 1.34B | 724M | 1.84B | 712M | |
| **Language** | **ml** | **mr** | **or** | **pa** | **ta** | **te** | **all** |
| **No. of Tokens** | 767M | 560M | 104M | 814M | 549M | 671M | 8.9B |
## Evaluation Results
IndicBERT is evaluated on IndicGLUE and some additional tasks. The results are summarized below. For more details about the tasks, refer to our [official repo](https://github.com/divkakwani/indic-bert).
#### IndicGLUE
Task | mBERT | XLM-R | IndicBERT
-----| ----- | ----- | ------
News Article Headline Prediction | 89.58 | 95.52 | **95.87**
Wikipedia Section Title Prediction| **73.66** | 66.33 | 73.31
Cloze-style multiple-choice QA | 39.16 | 27.98 | **41.87**
Article Genre Classification | 90.63 | 97.03 | **97.34**
Named Entity Recognition (F1-score) | **73.24** | 65.93 | 64.47
Cross-Lingual Sentence Retrieval Task | 21.46 | 13.74 | **27.12**
Average | 64.62 | 61.09 | **66.66**
#### Additional Tasks
Task | Task Type | mBERT | XLM-R | IndicBERT
-----| ----- | ----- | ------ | -----
BBC News Classification | Genre Classification | 60.55 | **75.52** | 74.60
IIT Product Reviews | Sentiment Analysis | 74.57 | **78.97** | 71.32
IITP Movie Reviews | Sentiment Analysis | 56.77 | **61.61** | 59.03
Soham News Article | Genre Classification | 80.23 | **87.6** | 78.45
Midas Discourse | Discourse Analysis | 71.20 | **79.94** | 78.44
iNLTK Headlines Classification | Genre Classification | 87.95 | 93.38 | **94.52**
ACTSA Sentiment Analysis | Sentiment Analysis | 48.53 | 59.33 | **61.18**
Winograd NLI | Natural Language Inference | 56.34 | 55.87 | **56.34**
Choice of Plausible Alternative (COPA) | Natural Language Inference | 54.92 | 51.13 | **58.33**
Amrita Exact Paraphrase | Paraphrase Detection | **93.81** | 93.02 | 93.75
Amrita Rough Paraphrase | Paraphrase Detection | 83.38 | 82.20 | **84.33**
Average | | 69.84 | **74.42** | 73.66
\* Note: all models have been restricted to a max_seq_length of 128.
## Downloads
The model can be downloaded [here](https://storage.googleapis.com/ai4bharat-public-indic-nlp-corpora/models/indic-bert-v1.tar.gz). Both tf checkpoints and pytorch binaries are included in the archive. Alternatively, you can also download it from [Huggingface](https://huggingface.co/ai4bharat/indic-bert).
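If you only need the transformers interface rather than the raw checkpoints, a minimal loading sketch is shown below. The model id `ai4bharat/indic-bert` is the one used by this repository; treat the snippet as illustrative rather than an official example.

```python
import torch
from transformers import AutoModel, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ai4bharat/indic-bert")
model = AutoModel.from_pretrained("ai4bharat/indic-bert")

# Encode a Hindi sentence and take the [CLS] vector as a crude sentence representation.
inputs = tokenizer("यह एक उदाहरण वाक्य है।", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)
print(outputs.last_hidden_state[:, 0, :].shape)  # (1, hidden_size)
```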
## Citing
If you are using any of the resources, please cite the following article:
```
@inproceedings{kakwani2020indicnlpsuite,
title={{IndicNLPSuite: Monolingual Corpora, Evaluation Benchmarks and Pre-trained Multilingual Language Models for Indian Languages}},
author={Divyanshu Kakwani and Anoop Kunchukuttan and Satish Golla and Gokul N.C. and Avik Bhattacharyya and Mitesh M. Khapra and Pratyush Kumar},
year={2020},
booktitle={Findings of EMNLP},
}
```
We would like to hear from you if:
- You are using our resources. Please let us know how you are putting these resources to use.
- You have any feedback on these resources.
## License
The IndicBERT code (and models) are released under the MIT License.
## Contributors
- Divyanshu Kakwani
- Anoop Kunchukuttan
- Gokul NC
- Satish Golla
- Avik Bhattacharyya
- Mitesh Khapra
- Pratyush Kumar
This work is the outcome of a volunteer effort as part of the [AI4Bharat initiative](https://ai4bharat.org).
## Contact
- Anoop Kunchukuttan ([[email protected]](mailto:[email protected]))
- Mitesh Khapra ([[email protected]](mailto:[email protected]))
- Pratyush Kumar ([[email protected]](mailto:[email protected]))
|
{"language": ["as", "bn", "en", "gu", "hi", "kn", "ml", "mr", "or", "pa", "ta", "te"], "license": "mit", "datasets": ["AI4Bharat IndicNLP Corpora"]}
| null |
ai4bharat/indic-bert
|
[
"transformers",
"pytorch",
"albert",
"as",
"bn",
"en",
"gu",
"hi",
"kn",
"ml",
"mr",
"or",
"pa",
"ta",
"te",
"license:mit",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"as",
"bn",
"en",
"gu",
"hi",
"kn",
"ml",
"mr",
"or",
"pa",
"ta",
"te"
] |
TAGS
#transformers #pytorch #albert #as #bn #en #gu #hi #kn #ml #mr #or #pa #ta #te #license-mit #endpoints_compatible #has_space #region-us
|
IndicBERT
=========
IndicBERT is a multilingual ALBERT model pretrained exclusively on 12 major Indian languages. It is pre-trained on our novel monolingual corpus of around 9 billion tokens and subsequently evaluated on a set of diverse tasks. IndicBERT has much fewer parameters than other multilingual models (mBERT, XLM-R etc.) while it also achieves a performance on-par or better than these models.
The 12 languages covered by IndicBERT are: Assamese, Bengali, English, Gujarati, Hindi, Kannada, Malayalam, Marathi, Oriya, Punjabi, Tamil, Telugu.
The code can be found here. For more information, checkout our project page or our paper.
Pretraining Corpus
------------------
We pre-trained indic-bert on AI4Bharat's monolingual corpus. The corpus has the following distribution of languages:
Evaluation Results
------------------
IndicBERT is evaluated on IndicGLUE and some additional tasks. The results are summarized below. For more details about the tasks, refer our official repo
#### IndicGLUE
#### Additional Tasks
\* Note: all models have been restricted to a max\_seq\_length of 128.
Downloads
---------
The model can be downloaded here. Both tf checkpoints and pytorch binaries are included in the archive. Alternatively, you can also download it from Huggingface.
Citing
------
If you are using any of the resources, please cite the following article:
We would like to hear from you if:
* You are using our resources. Please let us know how you are putting these resources to use.
* You have any feedback on these resources.
License
-------
The IndicBERT code (and models) are released under the MIT License.
Contributors
------------
* Divyanshu Kakwani
* Anoop Kunchukuttan
* Gokul NC
* Satish Golla
* Avik Bhattacharyya
* Mitesh Khapra
* Pratyush Kumar
This work is the outcome of a volunteer effort as part of AI4Bharat initiative.
Contact
-------
* Anoop Kunchukuttan (anoop.kunchukuttan@URL)
* Mitesh Khapra (miteshk@URL)
* Pratyush Kumar (pratyush@URL)
|
[
"#### IndicGLUE",
"#### Additional Tasks\n\n\n\n\\* Note: all models have been restricted to a max\\_seq\\_length of 128.\n\n\nDownloads\n---------\n\n\nThe model can be downloaded here. Both tf checkpoints and pytorch binaries are included in the archive. Alternatively, you can also download it from Huggingface.\n\n\nCiting\n------\n\n\nIf you are using any of the resources, please cite the following article:\n\n\nWe would like to hear from you if:\n\n\n* You are using our resources. Please let us know how you are putting these resources to use.\n* You have any feedback on these resources.\n\n\nLicense\n-------\n\n\nThe IndicBERT code (and models) are released under the MIT License.\n\n\nContributors\n------------\n\n\n* Divyanshu Kakwani\n* Anoop Kunchukuttan\n* Gokul NC\n* Satish Golla\n* Avik Bhattacharyya\n* Mitesh Khapra\n* Pratyush Kumar\n\n\nThis work is the outcome of a volunteer effort as part of AI4Bharat initiative.\n\n\nContact\n-------\n\n\n* Anoop Kunchukuttan (anoop.kunchukuttan@URL)\n* Mitesh Khapra (miteshk@URL)\n* Pratyush Kumar (pratyush@URL)"
] |
[
"TAGS\n#transformers #pytorch #albert #as #bn #en #gu #hi #kn #ml #mr #or #pa #ta #te #license-mit #endpoints_compatible #has_space #region-us \n",
"#### IndicGLUE",
"#### Additional Tasks\n\n\n\n\\* Note: all models have been restricted to a max\\_seq\\_length of 128.\n\n\nDownloads\n---------\n\n\nThe model can be downloaded here. Both tf checkpoints and pytorch binaries are included in the archive. Alternatively, you can also download it from Huggingface.\n\n\nCiting\n------\n\n\nIf you are using any of the resources, please cite the following article:\n\n\nWe would like to hear from you if:\n\n\n* You are using our resources. Please let us know how you are putting these resources to use.\n* You have any feedback on these resources.\n\n\nLicense\n-------\n\n\nThe IndicBERT code (and models) are released under the MIT License.\n\n\nContributors\n------------\n\n\n* Divyanshu Kakwani\n* Anoop Kunchukuttan\n* Gokul NC\n* Satish Golla\n* Avik Bhattacharyya\n* Mitesh Khapra\n* Pratyush Kumar\n\n\nThis work is the outcome of a volunteer effort as part of AI4Bharat initiative.\n\n\nContact\n-------\n\n\n* Anoop Kunchukuttan (anoop.kunchukuttan@URL)\n* Mitesh Khapra (miteshk@URL)\n* Pratyush Kumar (pratyush@URL)"
] |
[
57,
6,
261
] |
[
"passage: TAGS\n#transformers #pytorch #albert #as #bn #en #gu #hi #kn #ml #mr #or #pa #ta #te #license-mit #endpoints_compatible #has_space #region-us \n#### IndicGLUE#### Additional Tasks\n\n\n\n\\* Note: all models have been restricted to a max\\_seq\\_length of 128.\n\n\nDownloads\n---------\n\n\nThe model can be downloaded here. Both tf checkpoints and pytorch binaries are included in the archive. Alternatively, you can also download it from Huggingface.\n\n\nCiting\n------\n\n\nIf you are using any of the resources, please cite the following article:\n\n\nWe would like to hear from you if:\n\n\n* You are using our resources. Please let us know how you are putting these resources to use.\n* You have any feedback on these resources.\n\n\nLicense\n-------\n\n\nThe IndicBERT code (and models) are released under the MIT License.\n\n\nContributors\n------------\n\n\n* Divyanshu Kakwani\n* Anoop Kunchukuttan\n* Gokul NC\n* Satish Golla\n* Avik Bhattacharyya\n* Mitesh Khapra\n* Pratyush Kumar\n\n\nThis work is the outcome of a volunteer effort as part of AI4Bharat initiative.\n\n\nContact\n-------\n\n\n* Anoop Kunchukuttan (anoop.kunchukuttan@URL)\n* Mitesh Khapra (miteshk@URL)\n* Pratyush Kumar (pratyush@URL)"
] |
[
-0.05582713335752487,
0.10226380825042725,
-0.00016369877266697586,
0.05241094529628754,
0.05209760367870331,
0.014915079809725285,
0.17563749849796295,
0.052143730223178864,
0.05325024947524071,
0.023330707103013992,
0.08508089184761047,
0.1014222502708435,
0.10843880474567413,
0.045661915093660355,
0.018334295600652695,
-0.2905360758304596,
-0.03577873483300209,
0.01910848170518875,
0.012194796465337276,
0.07584677636623383,
0.1268540769815445,
-0.058910928666591644,
0.09177211672067642,
0.03624682128429413,
-0.045955732464790344,
-0.014556442387402058,
-0.06077614799141884,
-0.07305456697940826,
0.08100447803735733,
0.035007864236831665,
0.037009838968515396,
0.059462130069732666,
0.043428704142570496,
-0.17021070420742035,
0.017850274220108986,
-0.03500036522746086,
-0.01581084169447422,
0.04242333397269249,
0.04942341521382332,
0.03388079255819321,
0.3042193055152893,
-0.013169640675187111,
-0.023386143147945404,
0.050446487963199615,
-0.09200077503919601,
-0.07623984664678574,
-0.09414989501237869,
0.19106437265872955,
0.13875728845596313,
0.07187563925981522,
-0.01364113762974739,
0.1691797375679016,
-0.0028242405969649553,
0.039580777287483215,
0.11894197016954422,
-0.19997185468673706,
-0.051925599575042725,
0.04990800470113754,
0.03950363025069237,
0.025835834443569183,
-0.06860929727554321,
-0.012929183430969715,
0.0829177051782608,
-0.0173768512904644,
0.051116421818733215,
-0.09001524746417999,
0.10007192939519882,
-0.019312197342514992,
-0.0943194106221199,
-0.023155419155955315,
0.16693834960460663,
0.01896955817937851,
-0.057815782725811005,
-0.09370622038841248,
0.003776371944695711,
0.09419324994087219,
0.040510665625333786,
0.015625860542058945,
0.030158810317516327,
-0.002975722076371312,
0.0065105934627354145,
-0.019949352368712425,
-0.13272327184677124,
-0.02559131197631359,
0.035641420632600784,
-0.05162747949361801,
0.039255253970623016,
0.007410737220197916,
0.0024131108075380325,
0.05385950207710266,
-0.06342858821153641,
-0.08903123438358307,
-0.06719362735748291,
-0.07831275463104248,
0.059711914509534836,
-0.011009957641363144,
0.039096247404813766,
-0.016332905739545822,
0.07957661896944046,
0.11360342055559158,
-0.08882638067007065,
-0.07171028107404709,
0.018985658884048462,
0.020791297778487206,
0.06249786540865898,
0.040565479546785355,
-0.11319662630558014,
-0.036947064101696014,
0.046900372952222824,
-0.04550103843212128,
0.062113359570503235,
0.01074304711073637,
-0.019715789705514908,
0.0553395040333271,
-0.05241067335009575,
0.08374453336000443,
0.03843085840344429,
0.04811883717775345,
0.02930089831352234,
-0.0219471026211977,
0.3240034878253937,
-0.02230299450457096,
-0.06399981677532196,
-0.0263549592345953,
-0.03689390420913696,
-0.032715898007154465,
-0.009257553145289421,
0.03523973375558853,
-0.04913755878806114,
0.038123492151498795,
-0.08978208899497986,
-0.016498621553182602,
-0.06185311824083328,
-0.09242609888315201,
0.030390076339244843,
-0.08494903147220612,
0.0012142366031184793,
-0.15563006699085236,
-0.09950237721204758,
0.018603824079036713,
0.0576246939599514,
-0.025100745260715485,
-0.05074438825249672,
0.02508658356964588,
0.005460317712277174,
-0.03252207860350609,
-0.06146569922566414,
-0.05246241018176079,
-0.04330809414386749,
0.06113254278898239,
-0.0919380858540535,
0.06278301030397415,
-0.012471767142415047,
0.050462059676647186,
-0.06739142537117004,
0.07882626354694366,
-0.045648328959941864,
0.007919631898403168,
-0.06246444582939148,
0.1107654795050621,
-0.1284557729959488,
-0.09082061797380447,
0.1055278405547142,
-0.01066720113158226,
0.04249654710292816,
0.17157132923603058,
-0.05018274486064911,
-0.027370696887373924,
-0.00012275758490432054,
-0.13203981518745422,
-0.16695687174797058,
0.10968220233917236,
0.029189078137278557,
0.0891089215874672,
0.03544536605477333,
0.18658265471458435,
0.05206949636340141,
-0.08091847598552704,
-0.09362760186195374,
0.010911485180258751,
0.018237970769405365,
-0.03349423035979271,
0.08355142921209335,
-0.008454558439552784,
0.1650741696357727,
-0.02209794521331787,
-0.06909526139497757,
0.007897411473095417,
-0.025419801473617554,
-0.05629247426986694,
0.016818314790725708,
-0.09908700734376907,
-0.045711733400821686,
0.021498098969459534,
0.005463539622724056,
-0.00969997514039278,
-0.019497064873576164,
-0.031005097553133965,
0.09186958521604538,
0.0060909101739525795,
0.04124666005373001,
-0.06889238208532333,
0.07519654184579849,
-0.020207451656460762,
-0.033943675458431244,
-0.07847222685813904,
0.10092046856880188,
0.053876444697380066,
-0.0636352002620697,
0.07845939695835114,
0.06488081812858582,
-0.004799009300768375,
0.06118936091661453,
-0.025563810020685196,
-0.03681033477187157,
0.07893073558807373,
-0.03112846240401268,
-0.024893807247281075,
-0.08688674122095108,
-0.008033279329538345,
0.011050684377551079,
0.0404529795050621,
-0.1461014300584793,
0.01992599479854107,
0.09899910539388657,
0.045849043875932693,
0.0303622055798769,
-0.0007154321647249162,
0.09619300812482834,
0.012760716490447521,
-0.06744278967380524,
-0.01871013082563877,
0.04444199055433273,
0.009918062947690487,
-0.13654357194900513,
0.1420724093914032,
-0.015356354415416718,
-0.026601728051900864,
0.06322742998600006,
-0.032272424548864365,
-0.03011409565806389,
0.07715562731027603,
-0.05005505681037903,
-0.01986055262386799,
0.07014479488134384,
0.06165314465761185,
0.04124875366687775,
0.02489994280040264,
0.06671524792909622,
-0.0633918046951294,
-0.05807621777057648,
-0.05300186201930046,
-0.05571708455681801,
-0.05060158297419548,
0.10050652921199799,
0.09242407977581024,
-0.1372826099395752,
0.11503967642784119,
0.022660033777356148,
0.07808610051870346,
0.1589408665895462,
0.010904965922236443,
-0.08676818758249283,
-0.030708888545632362,
-0.001180904801003635,
0.03128151595592499,
0.0761483907699585,
-0.09015847742557526,
-0.0056488411501049995,
0.05881145969033241,
-0.03086792305111885,
-0.05690976604819298,
-0.08548986911773682,
-0.04867067560553551,
0.006409359630197287,
-0.047752123326063156,
-0.08303157240152359,
0.11812624335289001,
-0.11872221529483795,
0.06833208352327347,
0.00001343784879281884,
0.12586252391338348,
-0.04115036129951477,
-0.03840545564889908,
-0.1256236582994461,
0.10118250548839569,
-0.03723781555891037,
-0.23038636147975922,
-0.11015261709690094,
-0.034253355115652084,
-0.014480721205472946,
-0.007667279802262783,
0.10852571576833725,
-0.07403062283992767,
-0.0710926428437233,
-0.05662514641880989,
-0.020935870707035065,
-0.0028848382644355297,
-0.06922980397939682,
-0.07355041801929474,
-0.007540669292211533,
-0.03563855215907097,
-0.12513038516044617,
-0.011533024720847607,
0.04076414182782173,
-0.10701942443847656,
0.06264139711856842,
-0.005744330585002899,
0.1071949452161789,
0.017706582322716713,
0.05959884077310562,
-0.05208401381969452,
-0.03914503753185272,
0.17239870131015778,
-0.12070939689874649,
0.1408568173646927,
0.1631968915462494,
-0.002888853894546628,
0.07442383468151093,
0.19369399547576904,
0.00939386896789074,
-0.03899490460753441,
0.03393979370594025,
-0.03726620972156525,
-0.037910785526037216,
-0.23679281771183014,
-0.037215206772089005,
-0.06996750831604004,
0.08322566002607346,
-0.008830060251057148,
0.03971777856349945,
0.10111980885267258,
0.11600688844919205,
-0.08305132389068604,
0.04091080650687218,
-0.05840311571955681,
0.12371856719255447,
0.015897568315267563,
0.007835367694497108,
0.06818842887878418,
-0.10298015177249908,
0.0399995893239975,
0.1344345062971115,
-0.07988807559013367,
0.2362649291753769,
-0.019195012748241425,
0.17423509061336517,
0.12173143029212952,
0.18536430597305298,
0.013957363553345203,
0.0322883315384388,
-0.06491196155548096,
0.010188656859099865,
-0.025665774941444397,
-0.07269685715436935,
-0.02874346263706684,
0.14285509288311005,
0.04815119132399559,
-0.023243099451065063,
0.042142435908317566,
-0.09345085173845291,
0.0010555466869845986,
0.2370232194662094,
0.0614837147295475,
-0.11639508605003357,
-0.11309337615966797,
0.03773973509669304,
-0.10157790780067444,
0.0037015476264059544,
-0.0006885732291266322,
0.04349212348461151,
-0.10915985703468323,
0.06654299050569534,
-0.02643783763051033,
0.07932846993207932,
-0.10033431649208069,
-0.0021060556173324585,
-0.05739184096455574,
-0.09828155487775803,
-0.04697088897228241,
0.05705173313617706,
-0.2008829563856125,
0.2997545599937439,
0.006259518209844828,
0.05400877445936203,
-0.06775012612342834,
-0.0198198389261961,
0.05085533484816551,
0.12955321371555328,
0.11916504800319672,
0.0011512477649375796,
0.030902640894055367,
-0.03939196467399597,
-0.0199139267206192,
-0.017026139423251152,
0.014979355037212372,
-0.0919252261519432,
0.05014083534479141,
-0.0017944691935554147,
0.010455801151692867,
-0.06103288754820824,
0.10808300226926804,
-0.18219320476055145,
-0.06579470634460449,
0.11641401052474976,
-0.04353522136807442,
0.09894860535860062,
-0.05824896693229675,
-0.030578233301639557,
0.09799971431493759,
0.08193018287420273,
-0.1481739580631256,
-0.15740105509757996,
-0.03425649181008339,
-0.06149858236312866,
0.0870472863316536,
-0.12529276311397552,
0.06578794121742249,
-0.042381513863801956,
-0.020924342796206474,
-0.010877584107220173,
0.024090459570288658,
-0.030997255817055702,
-0.07577112317085266,
-0.1303195357322693,
0.04623851552605629,
0.019952427595853806,
0.07095465064048767,
0.053344860672950745,
0.009547648951411247,
0.0589098297059536,
-0.016046151518821716,
-0.1006823480129242,
-0.04222914204001427,
-0.005831644404679537,
0.09959831833839417,
-0.07737082988023758,
-0.06393373012542725,
-0.0594034306704998,
-0.11042607575654984,
-0.20224860310554504,
0.04453152418136597,
0.34068864583969116,
0.007061437703669071,
0.15506696701049805,
0.1985820233821869,
-0.05856276676058769,
-0.1537407487630844,
-0.13705947995185852,
-0.0802094042301178,
-0.034424904733896255,
0.048461250960826874,
-0.15536600351333618,
-0.05332333967089653,
0.006583455950021744,
-0.019225720316171646,
0.039080653339624405,
-0.14120130240917206,
-0.13710415363311768,
0.002577042207121849,
0.13367189466953278,
-0.07148968428373337,
-0.2083582580089569,
-0.08203461766242981,
-0.05350854992866516,
-0.14659497141838074,
0.012236548587679863,
0.09475383162498474,
0.03790494427084923,
-0.062126073986291885,
0.10068684071302414,
-0.009788256138563156,
-0.06511646509170532,
0.08808979392051697,
-0.044375017285346985,
0.0071412548422813416,
-0.17868247628211975,
-0.12293718755245209,
0.06841778755187988,
-0.005789083894342184,
0.1787988692522049,
-0.06650369614362717,
0.01770574226975441,
-0.11929770559072495,
-0.027060000225901604,
-0.042167339473962784,
0.03237215057015419,
-0.055331479758024216,
-0.15992507338523865,
-0.11078330874443054,
0.15756069123744965,
0.02859199419617653,
0.021044155582785606,
0.02573242411017418,
-0.05374246835708618,
0.06549041718244553,
0.0194536205381155,
0.1941690742969513,
0.045731693506240845,
0.09413924068212509,
-0.09573347866535187,
-0.02959766611456871,
0.047583289444446564,
-0.22836291790008545,
-0.061951957643032074,
0.04838313162326813,
0.043046679347753525,
0.08586477488279343,
-0.05771863833069801,
-0.09888943284749985,
0.07706037163734436,
0.0779915601015091,
-0.053169574588537216,
-0.18921451270580292,
0.0071277916431427,
0.020965054631233215,
-0.02980569750070572,
0.04047791659832001,
0.10038738697767258,
-0.11037570983171463,
-0.025619080290198326,
0.00826460961252451,
0.06830252707004547,
-0.03661824390292168,
0.0465431809425354,
0.11312045156955719,
0.021521523594856262,
-0.020489949733018875,
0.03180796653032303,
0.08252362161874771,
-0.04051358625292778,
-0.018163319677114487,
0.09238212555646896,
-0.09646199643611908,
-0.11384779959917068,
-0.14580018818378448,
-0.08126000314950943,
-0.14047954976558685,
-0.015261559747159481,
0.002859034575521946,
0.015424286015331745,
0.004105349071323872,
0.0855964720249176,
-0.0036727830301970243,
0.007885799743235111,
0.02353837341070175,
0.04925234243273735,
-0.057472821325063705,
0.06836413592100143,
0.005416283383965492,
-0.0033262178767472506,
-0.046416766941547394,
0.0033690487034618855,
0.03564433753490448,
0.057278506457805634,
-0.013132631778717041,
-0.0004929902497678995,
-0.09995318949222565,
-0.03809988498687744,
-0.14785166084766388,
-0.03008667379617691,
-0.18455062806606293,
-0.017390424385666847,
-0.025929413735866547,
-0.06768853217363358,
-0.07106935977935791,
-0.005798930302262306,
-0.045122213661670685,
0.003111613215878606,
-0.041549716144800186,
0.13827408850193024,
-0.1620367169380188,
-0.06613685190677643,
0.10342417657375336,
-0.01804496720433235,
0.08934950083494186,
-0.04718121513724327,
-0.06999702006578445,
-0.02682109922170639,
-0.088138647377491,
0.10549554228782654,
-0.10263838618993759,
0.04225398972630501,
0.047939516603946686,
-0.0974053218960762,
-0.04047992080450058,
0.015314646065235138,
-0.0052756452932953835,
0.02838992141187191,
0.017457040026783943,
-0.039898063987493515,
0.015781130641698837,
-0.022489730268716812,
-0.05901852622628212,
-0.07382030785083771,
0.06485196202993393,
0.0513170100748539,
-0.004823717288672924,
0.11811865866184235,
-0.042420439422130585,
0.06185412406921387,
-0.09778407961130142,
0.014778852462768555,
0.039329688996076584,
-0.030772386118769646,
-0.0008966352324932814,
-0.1182641088962555,
-0.021053196862339973,
-0.03280803933739662,
0.14689573645591736,
-0.03710285946726799,
-0.11353074014186859,
0.06653588265180588,
0.02061491273343563,
0.11426755785942078,
-0.06911037862300873,
0.15248796343803406,
0.005593581590801477,
0.019833263009786606,
-0.09336899220943451,
0.03685252368450165,
-0.09612215310335159,
-0.15101702511310577,
0.08270333707332611,
0.11122701317071915,
0.06542106717824936,
-0.0443374402821064,
0.000011454631930973846,
-0.03775007277727127,
-0.02342328429222107,
-0.14999768137931824,
0.005844323430210352,
0.09022829681634903,
0.0012624856317415833,
0.2240518182516098,
0.26068100333213806,
-0.20914144814014435,
0.04200756922364235,
-0.001878291368484497,
-0.029441501945257187,
0.006981117185205221,
-0.21849152445793152,
-0.038072746247053146,
-0.08349508792161942,
0.07638408243656158,
-0.06951248645782471,
0.061371687799692154,
0.09748697280883789,
0.024438200518488884,
-0.02267923764884472,
0.1409202665090561,
0.03067644312977791,
-0.007328305393457413,
0.050461407750844955,
0.007407420314848423,
-0.059084344655275345,
-0.07913455367088318,
0.03205958753824234,
-0.07451581209897995,
-0.02982865273952484,
-0.006210130173712969,
0.04038494825363159,
-0.061321280896663666,
0.013481389731168747,
-0.016010090708732605,
-0.06396903097629547,
0.0005838482175022364,
0.044017232954502106,
0.050359614193439484,
0.22641630470752716,
0.008599870838224888,
0.02234962210059166,
0.043972212821245193,
0.06204138696193695,
0.011190863326191902,
-0.07861435413360596,
-0.0769650861620903,
0.11092440783977509,
-0.044342923909425735,
-0.0685151219367981,
0.050264522433280945,
-0.0503750778734684,
0.04871297627687454,
0.25833678245544434,
0.18998326361179352,
0.0005487210582941771,
-0.013055606745183468,
-0.048484161496162415,
0.027156660333275795,
-0.06858804821968079,
0.12745079398155212,
0.07471024245023727,
0.17014209926128387,
-0.0851018950343132,
-0.0035307109355926514,
-0.10065840184688568,
0.023852752521634102,
-0.09527166932821274,
0.08640390634536743,
-0.003320105606690049,
-0.07030710577964783,
-0.02938101999461651,
0.1438254714012146,
-0.09844110161066055,
-0.21046508848667145,
0.04088304564356804,
-0.0026467901188880205,
-0.05135618895292282,
0.02117077447474003,
-0.05235423520207405,
0.04850522801280022,
0.05106724053621292,
0.00696677528321743,
0.03685508668422699,
0.11215218901634216,
0.0838506743311882,
-0.06899186223745346,
0.025297554209828377,
0.18885233998298645,
-0.04651672765612602,
0.1822381317615509,
-0.030416173860430717,
0.0617075078189373,
0.10640528053045273,
0.003853579517453909,
-0.07574189454317093,
0.09661930054426193,
0.12391950190067291,
-0.10093236714601517,
-0.008716478943824768,
0.09889023751020432,
-0.009740744717419147,
0.002683861181139946,
0.056745000183582306,
-0.02295450121164322,
0.07238855957984924,
0.1475686877965927,
0.07002227008342743,
-0.117350272834301,
0.11502677947282791,
-0.14599232375621796,
0.1550118327140808,
0.08090832084417343,
-0.07330962270498276,
-0.05501972883939743,
-0.063527412712574,
0.03242100775241852,
0.0006258664652705193,
-0.07053371518850327,
-0.00659378943964839,
-0.1392815262079239,
0.024575507268309593,
-0.04278866574168205,
0.09012425690889359,
-0.21784938871860504,
-0.03238867595791817,
0.0010806191712617874,
-0.029533687978982925,
-0.09716318547725677,
0.08898352086544037,
0.06282998621463776,
-0.021751075983047485,
-0.008352724835276604,
-0.11773045361042023,
-0.03262433037161827,
0.03193245455622673,
-0.1099795252084732,
-0.04778027907013893
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information Keras had access to. You should
probably proofread and complete it, then remove this comment. -->
# recipe-improver
This model is a fine-tuned version of [albert-base-v2](https://huggingface.co/albert-base-v2) on an unknown dataset.
It achieves the following results on the evaluation set:
- Train Loss: 2.5570
- Epoch: 0
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- optimizer: {'name': 'Adam', 'learning_rate': {'class_name': 'PolynomialDecay', 'config': {'initial_learning_rate': 5e-05, 'decay_steps': 5539, 'end_learning_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}}, 'decay': 0.0, 'beta_1': 0.9, 'beta_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}
- training_precision: float32
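For reference, the optimizer dictionary above corresponds roughly to the following Keras construction (a reconstruction from the config values, not the original training script):

```python
import tensorflow as tf

# Linear (power=1.0) decay from 5e-05 to 0 over 5539 steps, as in the config above.
lr_schedule = tf.keras.optimizers.schedules.PolynomialDecay(
    initial_learning_rate=5e-05,
    decay_steps=5539,
    end_learning_rate=0.0,
    power=1.0,
    cycle=False,
)
optimizer = tf.keras.optimizers.Adam(
    learning_rate=lr_schedule,
    beta_1=0.9,
    beta_2=0.999,
    epsilon=1e-07,
    amsgrad=False,
)
```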
### Training results
| Train Loss | Epoch |
|:----------:|:-----:|
| 2.5570 | 0 |
### Framework versions
- Transformers 4.15.0
- TensorFlow 2.7.0
- Datasets 1.17.0
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_keras_callback"], "model-index": [{"name": "recipe-improver", "results": []}]}
|
question-answering
|
aidan-o-brien/recipe-improver
|
[
"transformers",
"tf",
"albert",
"question-answering",
"generated_from_keras_callback",
"license:apache-2.0",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #tf #albert #question-answering #generated_from_keras_callback #license-apache-2.0 #endpoints_compatible #has_space #region-us
|
recipe-improver
===============
This model is a fine-tuned version of albert-base-v2 on an unknown dataset.
It achieves the following results on the evaluation set:
* Train Loss: 2.5570
* Epoch: 0
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* optimizer: {'name': 'Adam', 'learning\_rate': {'class\_name': 'PolynomialDecay', 'config': {'initial\_learning\_rate': 5e-05, 'decay\_steps': 5539, 'end\_learning\_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}}, 'decay': 0.0, 'beta\_1': 0.9, 'beta\_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}
* training\_precision: float32
### Training results
### Framework versions
* Transformers 4.15.0
* TensorFlow 2.7.0
* Datasets 1.17.0
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* optimizer: {'name': 'Adam', 'learning\\_rate': {'class\\_name': 'PolynomialDecay', 'config': {'initial\\_learning\\_rate': 5e-05, 'decay\\_steps': 5539, 'end\\_learning\\_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}}, 'decay': 0.0, 'beta\\_1': 0.9, 'beta\\_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}\n* training\\_precision: float32",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.15.0\n* TensorFlow 2.7.0\n* Datasets 1.17.0\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #tf #albert #question-answering #generated_from_keras_callback #license-apache-2.0 #endpoints_compatible #has_space #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* optimizer: {'name': 'Adam', 'learning\\_rate': {'class\\_name': 'PolynomialDecay', 'config': {'initial\\_learning\\_rate': 5e-05, 'decay\\_steps': 5539, 'end\\_learning\\_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}}, 'decay': 0.0, 'beta\\_1': 0.9, 'beta\\_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}\n* training\\_precision: float32",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.15.0\n* TensorFlow 2.7.0\n* Datasets 1.17.0\n* Tokenizers 0.10.3"
] |
[
52,
179,
4,
31
] |
[
"passage: TAGS\n#transformers #tf #albert #question-answering #generated_from_keras_callback #license-apache-2.0 #endpoints_compatible #has_space #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* optimizer: {'name': 'Adam', 'learning\\_rate': {'class\\_name': 'PolynomialDecay', 'config': {'initial\\_learning\\_rate': 5e-05, 'decay\\_steps': 5539, 'end\\_learning\\_rate': 0.0, 'power': 1.0, 'cycle': False, 'name': None}}, 'decay': 0.0, 'beta\\_1': 0.9, 'beta\\_2': 0.999, 'epsilon': 1e-07, 'amsgrad': False}\n* training\\_precision: float32### Training results### Framework versions\n\n\n* Transformers 4.15.0\n* TensorFlow 2.7.0\n* Datasets 1.17.0\n* Tokenizers 0.10.3"
] |
[
-0.0724559798836708,
0.02616341970860958,
-0.004955901764333248,
0.06773264706134796,
0.1381639987230301,
0.0515424869954586,
0.12553013861179352,
0.12411189824342728,
-0.07497887313365936,
0.11453232914209366,
0.1569124013185501,
0.11727006733417511,
0.06497757136821747,
0.09696158021688461,
-0.07938870042562485,
-0.15091583132743835,
0.06488942354917526,
-0.014420121908187866,
-0.08476164191961288,
0.08144539594650269,
0.083825021982193,
-0.0772114098072052,
0.07876499742269516,
-0.025265859439969063,
-0.09617175906896591,
0.03884348273277283,
0.06984274089336395,
-0.06708012521266937,
0.11988656967878342,
0.07522065192461014,
0.09095160663127899,
-0.007964609190821648,
-0.0005825235857628286,
-0.20924797654151917,
0.011434045620262623,
0.10376961529254913,
0.00016782243619672954,
0.06655494123697281,
0.005292508285492659,
-0.00047445963718928397,
0.0916488841176033,
-0.1129092127084732,
0.05490077659487724,
0.034863799810409546,
-0.14896249771118164,
-0.27638760209083557,
-0.11885959655046463,
0.004958198871463537,
0.08324992656707764,
0.10275672376155853,
-0.014446776360273361,
0.16661500930786133,
-0.062326688319444656,
0.0832308903336525,
0.2008405327796936,
-0.304641455411911,
-0.044972002506256104,
0.0416700653731823,
0.03724798932671547,
0.05163172632455826,
-0.0596599243581295,
0.019307110458612442,
0.045144934207201004,
0.04053020477294922,
0.023992080241441727,
-0.01434857677668333,
0.0038262116722762585,
-0.01294873096048832,
-0.08045203238725662,
-0.07620050013065338,
0.13484640419483185,
0.053453292697668076,
-0.06055653095245361,
-0.0647711306810379,
-0.03011236898601055,
-0.15137600898742676,
0.013468971475958824,
-0.02948876842856407,
0.010719641111791134,
-0.0034540819469839334,
-0.038229458034038544,
-0.019957315176725388,
-0.06858614087104797,
-0.05512192100286484,
-0.01868220418691635,
0.1315377652645111,
0.029560940340161324,
0.048833224922418594,
-0.02348950505256653,
0.0692266896367073,
-0.029263494536280632,
-0.12450046092271805,
-0.025864258408546448,
0.0038986869622021914,
-0.06550504267215729,
-0.01987125724554062,
-0.09368545562028885,
-0.0023287979420274496,
0.05607781559228897,
0.15164051949977875,
-0.058042630553245544,
0.0973261147737503,
-0.0038698159623891115,
0.023536622524261475,
-0.10269809514284134,
0.1416466236114502,
-0.03904969245195389,
0.014604697935283184,
-0.023523904383182526,
0.08746450394392014,
0.026256348937749863,
-0.045237183570861816,
-0.035288505256175995,
0.018041376024484634,
0.09331172704696655,
0.03371338173747063,
-0.05052786320447922,
0.069508858025074,
-0.0766986608505249,
0.0002699697797652334,
-0.05585211515426636,
-0.10953985899686813,
0.04112884774804115,
0.023804185912013054,
-0.0952429473400116,
0.028500191867351532,
0.05952166020870209,
0.007509663235396147,
-0.04515711963176727,
0.02897065132856369,
-0.06357301771640778,
-0.03837438300251961,
-0.10488636791706085,
-0.12691782414913177,
0.025964977219700813,
-0.07703566551208496,
-0.0014044377021491528,
-0.07109030336141586,
-0.14892582595348358,
-0.039485231041908264,
0.0766221210360527,
-0.042534131556749344,
-0.013924102298915386,
-0.053446412086486816,
-0.1626695692539215,
0.047802504152059555,
-0.00157206563744694,
0.1381533443927765,
-0.047688912600278854,
0.07571098953485489,
0.00665289955213666,
0.05904213339090347,
-0.026893489062786102,
0.03215905278921127,
-0.034697528928518295,
0.04173978790640831,
-0.14686864614486694,
0.07308674603700638,
-0.07917236536741257,
0.03528676554560661,
-0.1522284299135208,
-0.08296392858028412,
0.05633734539151192,
0.027333086356520653,
0.12219114601612091,
0.10584484040737152,
-0.1494808942079544,
-0.060695212334394455,
0.09357869625091553,
-0.06552530080080032,
-0.10809526592493057,
0.08945225924253464,
-0.053919468075037,
0.023520098999142647,
0.07006209343671799,
0.07147642225027084,
-0.00531797157600522,
-0.12567400932312012,
0.016788482666015625,
-0.0554434098303318,
0.03729986399412155,
0.06988534331321716,
0.04578213766217232,
-0.0343119353055954,
-0.1027047261595726,
0.006018307060003281,
-0.033069007098674774,
0.002174551133066416,
-0.07799699902534485,
-0.0640794187784195,
-0.025715753436088562,
-0.057149339467287064,
0.033649999648332596,
0.03238705173134804,
0.03546864911913872,
-0.10507446527481079,
-0.156456857919693,
0.035553812980651855,
0.0361250601708889,
-0.05241933465003967,
0.02038423903286457,
-0.07666844129562378,
0.03264357149600983,
0.035172440111637115,
0.009978733956813812,
-0.15087340772151947,
-0.06467775255441666,
0.014761827886104584,
-0.010952244512736797,
0.002944595878943801,
-0.009757614694535732,
0.06808961927890778,
0.006932183634489775,
-0.050620101392269135,
-0.004069036338478327,
-0.056907594203948975,
0.01376564335078001,
-0.07950130850076675,
-0.21557538211345673,
-0.029031015932559967,
-0.01943349465727806,
0.08778252452611923,
-0.2701845169067383,
0.007033177651464939,
0.0580613911151886,
0.10396037250757217,
0.02694406360387802,
-0.013417480513453484,
-0.037339646369218826,
0.05679645761847496,
-0.0156709011644125,
-0.05395008996129036,
0.0214077141135931,
0.01356334239244461,
-0.1216217577457428,
-0.04347947984933853,
-0.1746610701084137,
0.09737014025449753,
0.12387103587388992,
-0.08599084615707397,
-0.13045035302639008,
0.07262226194143295,
-0.03282805159687996,
-0.028346318751573563,
-0.01973303221166134,
-0.024196073412895203,
0.1570296734571457,
0.03166620805859566,
0.11977387964725494,
-0.048017073422670364,
-0.01768375188112259,
0.032744087278842926,
-0.029016515240073204,
-0.028428802266716957,
0.13776589930057526,
-0.02908778004348278,
-0.07843492180109024,
0.08609087020158768,
0.12181222438812256,
-0.11512148380279541,
0.0907658189535141,
-0.05105723440647125,
-0.07110980153083801,
-0.08430346101522446,
0.05136431008577347,
0.056957267224788666,
0.11334971338510513,
-0.10066232830286026,
0.00430151866748929,
0.017145222052931786,
0.01946384459733963,
-0.022495970129966736,
-0.19853220880031586,
-0.015618420206010342,
0.01855543814599514,
-0.044980861246585846,
-0.0057858191430568695,
0.010064343921840191,
0.024384813383221626,
0.12010827660560608,
0.03684411942958832,
-0.03452812880277634,
0.057985302060842514,
-0.0355428084731102,
-0.08268119394779205,
0.2319955974817276,
-0.12060173600912094,
-0.11776009947061539,
-0.11684349179267883,
-0.01216859556734562,
-0.03808450326323509,
-0.0036675971932709217,
0.014932620339095592,
-0.09914933890104294,
-0.05348971113562584,
-0.07446980476379395,
-0.0009683121461421251,
-0.023768456652760506,
0.027440104633569717,
0.04364911839365959,
-0.01880994625389576,
0.13250629603862762,
-0.10928533226251602,
-0.03344511613249779,
-0.016342977061867714,
-0.08280859887599945,
0.026578040793538094,
0.0009747992735356092,
0.010965688154101372,
0.10429178178310394,
-0.0014098944375291467,
0.023582452908158302,
-0.0489228256046772,
0.25961387157440186,
-0.06337962299585342,
-0.029943885281682014,
0.11927469819784164,
-0.013626650907099247,
0.056937310844659805,
0.13351523876190186,
0.048558708280324936,
-0.1254083663225174,
0.048478297889232635,
0.08349480479955673,
-0.018067307770252228,
-0.26119452714920044,
0.009542352519929409,
-0.03522685170173645,
-0.08536569774150848,
0.06111287325620651,
0.025770243257284164,
0.14154256880283356,
0.01305248774588108,
0.0008419178193435073,
0.09278891235589981,
0.03255124390125275,
0.05886548385024071,
0.13738861680030823,
0.05326345935463905,
0.09141340106725693,
-0.03293776512145996,
-0.009021834470331669,
0.02157207950949669,
-0.0026581883430480957,
0.2207770198583603,
0.02723696455359459,
0.06108413636684418,
0.07831203192472458,
0.07720700651407242,
-0.04035549238324165,
0.008052577264606953,
0.0036692668218165636,
-0.01674879714846611,
0.00748946750536561,
-0.060734570026397705,
-0.030629614368081093,
0.06145179644227028,
-0.01139574870467186,
0.07890897989273071,
-0.11673155426979065,
-0.009875585325062275,
0.05242977663874626,
0.24690109491348267,
0.11442746967077255,
-0.28213101625442505,
-0.11170405149459839,
0.011400101706385612,
-0.03305985406041145,
-0.04925230145454407,
0.0043160137720406055,
0.08294016122817993,
-0.07824798673391342,
0.058801259845495224,
-0.0542481392621994,
0.06065365672111511,
-0.01608925126492977,
0.0512131005525589,
0.11018431931734085,
0.07083622366189957,
0.007323115132749081,
0.02079627849161625,
-0.35831278562545776,
0.324638694524765,
0.03274328634142876,
0.13024216890335083,
-0.0727609321475029,
0.03841576725244522,
0.03244170919060707,
-0.05651630833745003,
0.07752314954996109,
-0.010949877090752125,
-0.11704451590776443,
-0.21808254718780518,
-0.029081372544169426,
0.01376081071794033,
0.14032408595085144,
-0.009027564898133278,
0.12020397186279297,
-0.037293802946805954,
0.022865651175379753,
0.07369788736104965,
0.03876844793558121,
-0.1678481101989746,
-0.06195814162492752,
0.05654408037662506,
0.03293991833925247,
-0.02715720422565937,
-0.08455538749694824,
-0.09004601836204529,
-0.09865652024745941,
0.1405840516090393,
-0.1260862946510315,
-0.030628856271505356,
-0.122866690158844,
0.0925433337688446,
0.09712522476911545,
-0.05843275785446167,
0.022541597485542297,
0.007218623999506235,
0.05850886553525925,
0.04683736339211464,
-0.07528319209814072,
0.14654773473739624,
-0.015064958482980728,
-0.22195936739444733,
-0.05593620240688324,
0.10590197145938873,
0.04795099049806595,
0.050431039184331894,
-0.012283623218536377,
0.08410653471946716,
0.023409688845276833,
-0.09814442694187164,
0.10125729441642761,
0.010619294829666615,
0.030453462153673172,
0.070770762860775,
-0.0044818236492574215,
0.020745165646076202,
-0.05188945308327675,
-0.0005772879812866449,
0.10984862595796585,
0.32626432180404663,
-0.07635071873664856,
0.001031609601341188,
0.0017336158780381083,
-0.08039340376853943,
-0.20096653699874878,
0.08032701164484024,
0.09574991464614868,
0.02753513865172863,
-0.029177691787481308,
-0.17758989334106445,
0.03744979202747345,
0.09403042495250702,
-0.02110694721341133,
0.050083182752132416,
-0.30297940969467163,
-0.14117705821990967,
0.07503703236579895,
0.12678532302379608,
0.1327323615550995,
-0.18874280154705048,
-0.03771490603685379,
-0.04035181552171707,
-0.08080311119556427,
0.12758170068264008,
-0.08091168850660324,
0.10592404752969742,
0.025779232382774353,
0.051512543112039566,
0.011253269389271736,
-0.042206596583127975,
0.17070718109607697,
-0.03754317760467529,
0.10045861452817917,
-0.03260120749473572,
-0.056121308356523514,
0.10237673670053482,
-0.08727822452783585,
0.024048903957009315,
-0.058121468871831894,
0.026407821103930473,
-0.14964595437049866,
0.0044424994848668575,
-0.09153048694133759,
0.04122230038046837,
-0.07549328356981277,
-0.009285807609558105,
-0.011111822910606861,
0.055998098105192184,
0.08201515674591064,
-0.014586862176656723,
0.12817512452602386,
-0.01897813193500042,
0.19430914521217346,
0.13553747534751892,
0.06760088354349136,
0.016997942700982094,
-0.033684492111206055,
0.09016434103250504,
-0.021133042871952057,
0.08563221246004105,
-0.17346489429473877,
0.049345292150974274,
0.14187568426132202,
0.005610623396933079,
0.15915977954864502,
0.06630803644657135,
-0.07568009942770004,
0.02687380649149418,
0.047604162245988846,
-0.12827740609645844,
-0.12523119151592255,
0.019411565735936165,
0.04720034450292587,
-0.0896134153008461,
0.02415912225842476,
0.14322833716869354,
-0.03832648694515228,
0.02164733223617077,
0.0024671342689543962,
0.03397246077656746,
-0.07697044312953949,
0.1376631259918213,
0.018371382728219032,
0.07887281477451324,
-0.07102078199386597,
0.11509345471858978,
0.05814974755048752,
-0.13245044648647308,
0.10688863694667816,
0.015806782990694046,
-0.040814656764268875,
-0.016106074675917625,
0.0264218021184206,
0.09045133739709854,
0.042945317924022675,
-0.046234358102083206,
-0.12142475694417953,
-0.17238442599773407,
0.06725112348794937,
0.20544904470443726,
0.04899687319993973,
0.06283529847860336,
-0.028153708204627037,
0.0003481167077552527,
-0.07761704921722412,
0.06324081122875214,
0.05942845344543457,
0.034201931208372116,
-0.12471876293420792,
0.14588965475559235,
0.000584387977141887,
-0.0017095488728955388,
-0.011686370708048344,
0.0017165553290396929,
-0.1989034116268158,
0.017323220148682594,
-0.16848091781139374,
0.003569020191207528,
0.022373266518115997,
-0.018148189410567284,
0.03592182323336601,
-0.06442276388406754,
-0.07118801027536392,
0.03488020598888397,
-0.10534165054559708,
-0.04338160529732704,
0.056674472987651825,
0.06711316853761673,
-0.11936475336551666,
-0.08201709389686584,
0.04327578470110893,
-0.10813926160335541,
0.04172041267156601,
0.05077880248427391,
-0.0005244334461167455,
0.03205406665802002,
-0.09960910677909851,
0.025227397680282593,
0.03268666937947273,
-0.0017775350715965033,
0.05107109993696213,
-0.17867569625377655,
0.009800645522773266,
-0.04166779667139053,
0.044446319341659546,
0.03380170837044716,
0.07316972315311432,
-0.0841108039021492,
-0.054360877722501755,
-0.005033998284488916,
-0.03420696035027504,
-0.04420330002903938,
0.04207080975174904,
0.12955833971500397,
-0.009449956938624382,
0.1582198292016983,
-0.11735573410987854,
0.023454640060663223,
-0.18620771169662476,
0.004503656644374132,
0.003406064584851265,
-0.08791815489530563,
-0.10121230781078339,
-0.02443784661591053,
0.11614565551280975,
-0.09567013382911682,
0.10940227657556534,
-0.06463242322206497,
0.10047449171543121,
0.04436936601996422,
-0.09466572850942612,
-0.03868453949689865,
0.07282837480306625,
0.18684904277324677,
0.042733121663331985,
-0.0179935023188591,
0.044205084443092346,
-0.024868067353963852,
0.0737939178943634,
0.10654815286397934,
0.21550177037715912,
0.15023717284202576,
0.05558527633547783,
0.12046992033720016,
0.0653112530708313,
-0.07385234534740448,
-0.07247208058834076,
0.10040979832410812,
-0.05944065377116203,
0.16119831800460815,
-0.057913344353437424,
0.09643884003162384,
0.06750879436731339,
-0.18192921578884125,
0.028031369671225548,
-0.11097660660743713,
-0.09534294903278351,
-0.12183331698179245,
-0.07821131497621536,
-0.07530482858419418,
-0.11156290769577026,
0.008729354478418827,
-0.10657206177711487,
0.03336108475923538,
0.10572291165590286,
0.03526771441102028,
0.00851923506706953,
0.09970816224813461,
-0.04035946726799011,
0.03689930960536003,
0.10756964981555939,
-0.0058692148886621,
-0.004934771452099085,
-0.051761701703071594,
-0.06493271142244339,
0.06525345891714096,
-0.0185234434902668,
0.03661502152681351,
0.0045216139405965805,
-0.031105052679777145,
0.04391755536198616,
-0.021032532677054405,
-0.10344910621643066,
0.049858611077070236,
0.02713996171951294,
0.021671483293175697,
0.06697863340377808,
0.0455712266266346,
0.0019515063613653183,
-0.022536220028996468,
0.14704594016075134,
-0.10630667209625244,
-0.02226671203970909,
-0.15652398765087128,
0.2720469534397125,
-0.019306326285004616,
0.04140033200383186,
0.0017667172942310572,
-0.07043515145778656,
-0.05085594579577446,
0.1799391210079193,
0.13157832622528076,
-0.07019568234682083,
-0.017283588647842407,
0.06461151689291,
-0.008733954280614853,
-0.052050888538360596,
0.11461521685123444,
0.08218161016702652,
-0.03247060626745224,
-0.06618166714906693,
-0.0466461107134819,
-0.01483948715031147,
-0.03311508521437645,
-0.029509738087654114,
0.07250126451253891,
0.028540290892124176,
-0.016562987118959427,
-0.026958486065268517,
0.05914608761668205,
-0.06704946607351303,
-0.14806444942951202,
0.1033020094037056,
-0.21676208078861237,
-0.15816259384155273,
-0.009807581081986427,
0.029233288019895554,
0.001076828921213746,
0.07694140076637268,
-0.013205258175730705,
-0.008097360841929913,
0.12223461270332336,
-0.0364411287009716,
0.004821260459721088,
-0.12172123789787292,
0.07861576229333878,
-0.056631337851285934,
0.17549681663513184,
-0.011923234909772873,
0.06068973243236542,
0.13986879587173462,
0.023485196754336357,
-0.08141323179006577,
0.03827597573399544,
0.09377918392419815,
-0.11887019872665405,
-0.024436526000499725,
0.09307771921157837,
-0.025937849655747414,
0.13664789497852325,
0.07444267719984055,
-0.10393757373094559,
0.03929273784160614,
-0.0834009051322937,
-0.07505140453577042,
-0.03910302743315697,
-0.04767632111907005,
-0.06669239699840546,
0.1264648288488388,
0.2518331706523895,
-0.04346214979887009,
0.012678942643105984,
-0.025359436869621277,
-0.0061329142190515995,
0.05873726308345795,
0.03183657303452492,
-0.06045164167881012,
-0.22417870163917542,
0.08479473739862442,
0.03490322828292847,
0.04442449286580086,
-0.1698037087917328,
-0.09260541200637817,
0.03150516003370285,
-0.023009296506643295,
-0.06948057562112808,
0.09574064612388611,
0.05960078164935112,
0.05219626426696777,
-0.058996330946683884,
-0.18271584808826447,
-0.03942064195871353,
0.20045523345470428,
-0.09183122962713242,
-0.08942700177431107
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# distilbert-base-uncased-finetuned-ner
This model is a fine-tuned version of [distilbert-base-uncased](https://huggingface.co/distilbert-base-uncased) on the conll2003 dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0607
- Precision: 0.9260
- Recall: 0.9384
- F1: 0.9322
- Accuracy: 0.9834
## Model description
More information needed
## Intended uses & limitations
More information needed
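In the meantime, the sketch below shows one way to query the checkpoint for named entities. It is an illustration only: the Hub id `aidj/distilbert-base-uncased-finetuned-ner` is taken from this repository, and the example sentence and `aggregation_strategy` choice are assumptions rather than documented usage.
```python
from transformers import pipeline

# Load the fine-tuned checkpoint as a token-classification pipeline;
# aggregation_strategy="simple" merges word-piece predictions into whole entity spans.
ner = pipeline(
    "token-classification",
    model="aidj/distilbert-base-uncased-finetuned-ner",
    aggregation_strategy="simple",
)

# Illustrative input; conll2003 uses the PER, ORG, LOC and MISC entity types.
for entity in ner("Hugging Face is based in New York City."):
    print(entity["entity_group"], entity["word"], round(float(entity["score"]), 3))
```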
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 16
- eval_batch_size: 16
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 3
### Training results
| Training Loss | Epoch | Step | Validation Loss | Precision | Recall | F1 | Accuracy |
|:-------------:|:-----:|:----:|:---------------:|:---------:|:------:|:------:|:--------:|
| 0.2545 | 1.0 | 878 | 0.0711 | 0.9096 | 0.9214 | 0.9154 | 0.9800 |
| 0.0555 | 2.0 | 1756 | 0.0593 | 0.9185 | 0.9356 | 0.9270 | 0.9827 |
| 0.0297 | 3.0 | 2634 | 0.0607 | 0.9260 | 0.9384 | 0.9322 | 0.9834 |
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.11.0
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["conll2003"], "metrics": ["precision", "recall", "f1", "accuracy"], "model-index": [{"name": "distilbert-base-uncased-finetuned-ner", "results": [{"task": {"type": "token-classification", "name": "Token Classification"}, "dataset": {"name": "conll2003", "type": "conll2003", "args": "conll2003"}, "metrics": [{"type": "precision", "value": 0.9260322366968425, "name": "Precision"}, {"type": "recall", "value": 0.9383599955252265, "name": "Recall"}, {"type": "f1", "value": 0.9321553592265377, "name": "F1"}, {"type": "accuracy", "value": 0.9834146186474335, "name": "Accuracy"}]}]}]}
|
token-classification
|
aidj/distilbert-base-uncased-finetuned-ner
|
[
"transformers",
"pytorch",
"tensorboard",
"distilbert",
"token-classification",
"generated_from_trainer",
"dataset:conll2003",
"license:apache-2.0",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us
|
distilbert-base-uncased-finetuned-ner
=====================================
This model is a fine-tuned version of distilbert-base-uncased on the conll2003 dataset.
It achieves the following results on the evaluation set:
* Loss: 0.0607
* Precision: 0.9260
* Recall: 0.9384
* F1: 0.9322
* Accuracy: 0.9834
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 2e-05
* train\_batch\_size: 16
* eval\_batch\_size: 16
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 3
### Training results
### Framework versions
* Transformers 4.16.2
* Pytorch 1.10.0+cu111
* Datasets 1.18.3
* Tokenizers 0.11.0
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
69,
98,
4,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #distilbert #token-classification #generated_from_trainer #dataset-conll2003 #license-apache-2.0 #model-index #autotrain_compatible #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 2e-05\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 16\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 3### Training results### Framework versions\n\n\n* Transformers 4.16.2\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.11.0"
] |
[
-0.10674282163381577,
0.10102853178977966,
-0.002465517958626151,
0.13086916506290436,
0.15187130868434906,
0.025957146659493446,
0.11255491524934769,
0.11721572279930115,
-0.09562104195356369,
0.0270137470215559,
0.1370638906955719,
0.16641782224178314,
0.013783784583210945,
0.12305346876382828,
-0.05596703663468361,
-0.24812856316566467,
-0.00004839667963096872,
0.046464625746011734,
-0.03727592155337334,
0.1317840814590454,
0.09936363995075226,
-0.13511036336421967,
0.09445904195308685,
0.017068661749362946,
-0.19642263650894165,
0.00028291752096265554,
0.0009192695142701268,
-0.05253244936466217,
0.13723407685756683,
0.014662216417491436,
0.12165889143943787,
-0.008518403396010399,
0.0882231667637825,
-0.17646488547325134,
0.004884419031441212,
0.0443740151822567,
0.009695981629192829,
0.09259212762117386,
0.03925318270921707,
0.006019055377691984,
0.10988382250070572,
-0.0687335878610611,
0.05746150016784668,
0.013257723301649094,
-0.121727854013443,
-0.21955782175064087,
-0.09356819838285446,
0.05253014713525772,
0.08678703755140305,
0.09994229674339294,
0.0014248932711780071,
0.14581012725830078,
-0.08803924173116684,
0.08871152997016907,
0.21807128190994263,
-0.2951975464820862,
-0.06571486592292786,
0.03994613140821457,
0.010211204178631306,
0.03563041239976883,
-0.09827253222465515,
-0.04465358704328537,
0.046999357640743256,
0.05221337080001831,
0.1336880922317505,
-0.028782129287719727,
-0.0979757234454155,
0.010821076110005379,
-0.14144182205200195,
-0.044912077486515045,
0.16239452362060547,
0.05129718407988548,
-0.03871642053127289,
-0.04960737004876137,
-0.05820475146174431,
-0.16618512570858002,
-0.031186511740088463,
-0.014052723534405231,
0.046592827886343,
-0.029446624219417572,
-0.05671209469437599,
0.007384976372122765,
-0.10098742693662643,
-0.061459071934223175,
-0.07898029685020447,
0.1378692239522934,
0.0380202978849411,
0.017170654609799385,
-0.025197463110089302,
0.11008170992136002,
-0.0007584613631479442,
-0.12385369837284088,
0.013307468965649605,
0.022157777100801468,
0.007831222377717495,
-0.04721156135201454,
-0.053048375993967056,
-0.046784933656454086,
0.005354710854589939,
0.14378765225410461,
-0.05120700225234032,
0.038449469953775406,
0.052312400192022324,
0.04207761958241463,
-0.08751679956912994,
0.1827598661184311,
-0.040401481091976166,
-0.01872001215815544,
0.005442928988486528,
0.04769450053572655,
0.026054345071315765,
-0.0006679188227280974,
-0.12291231751441956,
0.013516504317522049,
0.09664712846279144,
0.009179356507956982,
-0.06989584863185883,
0.06822370737791061,
-0.06309515237808228,
-0.025515535846352577,
0.024381037801504135,
-0.08730525523424149,
0.031791117042303085,
-0.012069731019437313,
-0.08048929274082184,
-0.02808321639895439,
0.015679024159908295,
0.022837277501821518,
-0.00988794770091772,
0.11039619147777557,
-0.09297914803028107,
0.022776952013373375,
-0.08954820036888123,
-0.0993276908993721,
0.01845838502049446,
-0.10994039475917816,
0.036175988614559174,
-0.09486308693885803,
-0.18903225660324097,
-0.0038846442475914955,
0.06562262773513794,
-0.024950560182332993,
-0.06624921411275864,
-0.04604743421077728,
-0.06844868510961533,
0.009623613208532333,
-0.010505547747015953,
0.12080568820238113,
-0.06546470522880554,
0.08635265380144119,
0.029610253870487213,
0.06161821633577347,
-0.04670310765504837,
0.04827064275741577,
-0.10792987793684006,
0.025507966056466103,
-0.16176465153694153,
0.030266569927334785,
-0.045471832156181335,
0.07196910679340363,
-0.09100852906703949,
-0.10512590408325195,
0.015515857376158237,
-0.01769314706325531,
0.07060462981462479,
0.08869008719921112,
-0.17287050187587738,
-0.06328638643026352,
0.14490774273872375,
-0.06518428027629852,
-0.12903136014938354,
0.12257422506809235,
-0.06550604850053787,
0.03821851685643196,
0.056645359843969345,
0.15878097712993622,
0.05736479535698891,
-0.08159112185239792,
-0.0012114942073822021,
0.00736780371516943,
0.04426080733537674,
-0.06593077629804611,
0.07466672360897064,
0.010129696689546108,
0.027494868263602257,
0.029349837452173233,
-0.027780141681432724,
0.05252052843570709,
-0.08934454619884491,
-0.09802853316068649,
-0.04370284080505371,
-0.09547458589076996,
0.035864461213350296,
0.06593748182058334,
0.06729681044816971,
-0.09844117611646652,
-0.08040683716535568,
0.046185001730918884,
0.0853930339217186,
-0.046075526624917984,
0.027247508987784386,
-0.07179886847734451,
0.07998564839363098,
-0.04585660248994827,
-0.030455738306045532,
-0.173146590590477,
-0.021135522052645683,
0.011449356563389301,
0.005250225309282541,
0.008500825613737106,
0.015068413689732552,
0.06364927440881729,
0.06626728177070618,
-0.04492207616567612,
-0.019944723695516586,
-0.028954509645700455,
0.00415543420240283,
-0.12995602190494537,
-0.19181965291500092,
-0.04576271027326584,
-0.02194259501993656,
0.13523346185684204,
-0.2007986456155777,
0.03426539525389671,
-0.0173116996884346,
0.08893585205078125,
0.014517122879624367,
-0.015163951553404331,
-0.04120983928442001,
0.06646399945020676,
-0.04824735224246979,
-0.05329236760735512,
0.06536132842302322,
0.014176443219184875,
-0.09443461149930954,
-0.05966218188405037,
-0.08352094143629074,
0.16836236417293549,
0.1288071572780609,
-0.10255037248134613,
-0.07724419981241226,
-0.013089139945805073,
-0.06763476878404617,
-0.03228446841239929,
-0.044649820774793625,
0.03615560755133629,
0.18236829340457916,
-0.006742941681295633,
0.14499835669994354,
-0.07430697977542877,
-0.05101504176855087,
0.026975082233548164,
-0.03516611456871033,
0.014795116148889065,
0.11928204447031021,
0.1253187507390976,
-0.09267465770244598,
0.1501709669828415,
0.15188230574131012,
-0.08425208181142807,
0.11108430474996567,
-0.043191276490688324,
-0.0634067952632904,
-0.026275664567947388,
-0.024664338678121567,
-0.005939875263720751,
0.11116600036621094,
-0.14112092554569244,
0.00290662026964128,
0.034061066806316376,
0.025776922702789307,
0.010538382455706596,
-0.21731895208358765,
-0.04119028523564339,
0.04029413312673569,
-0.033073846250772476,
-0.016511686146259308,
-0.010677571408450603,
0.004849852062761784,
0.09807133674621582,
0.007697263732552528,
-0.10082371532917023,
0.04869714751839638,
0.01059610117226839,
-0.07544916123151779,
0.20647667348384857,
-0.0838761180639267,
-0.14361785352230072,
-0.11599905043840408,
-0.0905027911067009,
-0.05345507338643074,
0.009856254793703556,
0.059931620955467224,
-0.07878634333610535,
-0.0372200682759285,
-0.07558973133563995,
-0.0029136089142411947,
-0.006422283127903938,
0.03108035773038864,
0.02529793605208397,
-0.007471349555999041,
0.06815987080335617,
-0.10210000723600388,
-0.01752580516040325,
-0.05290599167346954,
-0.04062176123261452,
0.03858441486954689,
0.03593001142144203,
0.11251383274793625,
0.14275948703289032,
-0.014959091320633888,
0.014134107157588005,
-0.024766532704234123,
0.25239330530166626,
-0.05814717337489128,
-0.024917688220739365,
0.13630107045173645,
-0.014195050112903118,
0.05360303074121475,
0.11823192238807678,
0.0721510648727417,
-0.08749548345804214,
-0.002848532050848007,
0.028049834072589874,
-0.040592268109321594,
-0.21315468847751617,
-0.04792482405900955,
-0.055354729294776917,
-0.011332789435982704,
0.09793354570865631,
0.024930022656917572,
0.03219345957040787,
0.07930275797843933,
0.038632556796073914,
0.08867747336626053,
-0.051558732986450195,
0.06800032407045364,
0.11710257828235626,
0.047733768820762634,
0.12461777776479721,
-0.03793270140886307,
-0.058964915573596954,
0.0440322607755661,
0.002559319604188204,
0.22588501870632172,
0.012717983685433865,
0.12898562848567963,
0.06364044547080994,
0.17565664649009705,
-0.011989033780992031,
0.07707777619361877,
-0.013251837342977524,
-0.0395975261926651,
-0.018950078636407852,
-0.034655436873435974,
-0.0357043594121933,
0.027804311364889145,
-0.06382867693901062,
0.07202896475791931,
-0.11216448247432709,
0.024230865761637688,
0.0554962120950222,
0.26348328590393066,
0.03910595923662186,
-0.34014397859573364,
-0.1007639467716217,
0.00034686445724219084,
-0.03767513483762741,
-0.0241052508354187,
0.030681224539875984,
0.07496927678585052,
-0.0916021540760994,
0.02920544147491455,
-0.06441991776227951,
0.09343470633029938,
-0.044736020267009735,
0.04377024993300438,
0.07920880615711212,
0.08378970623016357,
0.012969340197741985,
0.08495619893074036,
-0.28017479181289673,
0.2777101397514343,
0.0029753795824944973,
0.0658332034945488,
-0.07982791215181351,
0.008441386744379997,
0.027505360543727875,
0.06389282643795013,
0.08304000645875931,
-0.005217297002673149,
-0.039826568216085434,
-0.19002212584018707,
-0.054978903383016586,
0.021851669996976852,
0.06265510618686676,
-0.029603345319628716,
0.09129036217927933,
-0.026676936075091362,
0.010843174532055855,
0.06695637851953506,
0.012017486616969109,
-0.044859565794467926,
-0.10035078972578049,
-0.009613905102014542,
0.03524918109178543,
-0.056653618812561035,
-0.0620555654168129,
-0.10601742565631866,
-0.12040865421295166,
0.15700536966323853,
-0.024988263845443726,
-0.04100542888045311,
-0.10817570239305496,
0.07568781822919846,
0.07458308339118958,
-0.08317926526069641,
0.04716647416353226,
-0.003054100787267089,
0.07531878352165222,
0.028949754312634468,
-0.06046878919005394,
0.10396163165569305,
-0.0814371183514595,
-0.1629093736410141,
-0.06901572644710541,
0.10507304221391678,
0.035538200289011,
0.0638664960861206,
-0.003054016502574086,
0.01955837942659855,
-0.050755079835653305,
-0.08799341320991516,
0.030556347221136093,
-0.010500391013920307,
0.09959588944911957,
0.0054665314964950085,
-0.0447206124663353,
0.0323449969291687,
-0.05476900190114975,
-0.032835133373737335,
0.18673425912857056,
0.23573119938373566,
-0.10216915607452393,
0.02183579094707966,
0.02417724020779133,
-0.06260868161916733,
-0.17163985967636108,
0.024704085662961006,
0.055032216012477875,
0.003409328870475292,
0.03614190220832825,
-0.17390695214271545,
0.1411168873310089,
0.1134105995297432,
-0.017489926889538765,
0.10005639493465424,
-0.3180955648422241,
-0.11898104846477509,
0.1295904666185379,
0.13270096480846405,
0.11247265338897705,
-0.12470323592424393,
-0.017806313931941986,
-0.017273206263780594,
-0.1503603607416153,
0.10643728077411652,
-0.06807143241167068,
0.1109628975391388,
-0.035119764506816864,
0.09704557061195374,
0.0032560708932578564,
-0.06019850820302963,
0.12085691839456558,
0.03499970585107803,
0.10045310109853745,
-0.05871070921421051,
-0.04046986252069473,
0.03471328690648079,
-0.04558533802628517,
0.02915523387491703,
-0.08303598314523697,
0.03766954317688942,
-0.113989919424057,
-0.0221501924097538,
-0.06576741486787796,
0.04006345942616463,
-0.03517349064350128,
-0.06803809106349945,
-0.04485134407877922,
0.02729581855237484,
0.0612279511988163,
-0.011496484279632568,
0.143491730093956,
0.043676260858774185,
0.13765773177146912,
0.10363790392875671,
0.07146705687046051,
-0.08871149271726608,
-0.08053770661354065,
-0.026951611042022705,
-0.016436947509646416,
0.05504250153899193,
-0.1278722882270813,
0.02687564492225647,
0.14543835818767548,
0.023421823978424072,
0.13306835293769836,
0.08313989639282227,
-0.01810944452881813,
0.007063448429107666,
0.05403922125697136,
-0.16837191581726074,
-0.07124345749616623,
-0.001094398321583867,
-0.03538297861814499,
-0.11630727350711823,
0.0555201917886734,
0.08830641210079193,
-0.07034257054328918,
-0.011280592530965805,
-0.006711053662002087,
0.01500918809324503,
-0.052304212003946304,
0.18425290286540985,
0.05229801684617996,
0.04810124635696411,
-0.10273027420043945,
0.07023852318525314,
0.050831444561481476,
-0.06215246766805649,
0.0038517804350703955,
0.050537750124931335,
-0.08748049288988113,
-0.042676471173763275,
0.05316803231835365,
0.16745227575302124,
-0.06502221524715424,
-0.04683596268296242,
-0.13560546934604645,
-0.11896153539419174,
0.08686268329620361,
0.136448934674263,
0.11606625467538834,
0.01427344512194395,
-0.06237145513296127,
0.0007489633280783892,
-0.11452894657850266,
0.09314202517271042,
0.04565829783678055,
0.07186681777238846,
-0.15841245651245117,
0.13322530686855316,
0.0034389542415738106,
0.04350648820400238,
-0.014787247404456139,
0.03040989860892296,
-0.09584079682826996,
0.011858407407999039,
-0.11450181901454926,
-0.019432533532381058,
-0.04153725877404213,
0.012445530854165554,
-0.004439115524291992,
-0.055704232305288315,
-0.05927538871765137,
0.019824326038360596,
-0.10941969603300095,
-0.01811685971915722,
0.04256252944469452,
0.05963975191116333,
-0.11101070046424866,
-0.037789322435855865,
0.024938523769378662,
-0.061650775372982025,
0.07265954464673996,
0.0489344447851181,
0.022871850058436394,
0.0398770309984684,
-0.11427760124206543,
0.013910734094679356,
0.06993600726127625,
0.023240569978952408,
0.07710328698158264,
-0.10562027990818024,
-0.011992787010967731,
0.004989787936210632,
0.033350568264722824,
0.015847381204366684,
0.07284040749073029,
-0.13886187970638275,
-0.008171064779162407,
-0.00729010533541441,
-0.0787961333990097,
-0.0653151422739029,
0.021642902866005898,
0.1064024418592453,
0.013527140021324158,
0.20946772396564484,
-0.062065597623586655,
0.03913082554936409,
-0.20597189664840698,
0.001969011267647147,
-0.007653293199837208,
-0.1084112823009491,
-0.1260104477405548,
-0.057148925960063934,
0.052404046058654785,
-0.05882440134882927,
0.15651600062847137,
0.02671966701745987,
0.023507723584771156,
0.026373030617833138,
-0.007912973873317242,
0.018484774976968765,
0.010433545336127281,
0.1963111013174057,
0.032430727034807205,
-0.03997042402625084,
0.06263040006160736,
0.03891667351126671,
0.10380959510803223,
0.1113637164235115,
0.18906551599502563,
0.14337694644927979,
0.004050645045936108,
0.09300757199525833,
0.032170411199331284,
-0.05951380729675293,
-0.17797166109085083,
0.030102550983428955,
-0.03811167553067207,
0.10814602673053741,
-0.012081380002200603,
0.21837441623210907,
0.06686980277299881,
-0.17005465924739838,
0.037332724779844284,
-0.05451732501387596,
-0.07969462126493454,
-0.10215689241886139,
-0.07059860974550247,
-0.07879133522510529,
-0.12883445620536804,
0.004674707073718309,
-0.11389997601509094,
0.010397393256425858,
0.11740593612194061,
0.006749420426785946,
-0.02397792786359787,
0.1461671143770218,
0.008625485934317112,
0.04190048947930336,
0.04274088516831398,
0.008663094602525234,
-0.038811590522527695,
-0.10210888832807541,
-0.0673278421163559,
-0.019397543743252754,
-0.023396525532007217,
0.038504183292388916,
-0.06824186444282532,
-0.03711562603712082,
0.02567237615585327,
-0.011723523028194904,
-0.09057211875915527,
0.005746922921389341,
0.010010451078414917,
0.055684708058834076,
0.03810015693306923,
0.004775369074195623,
0.033840011805295944,
-0.011677463538944721,
0.19643491506576538,
-0.07451341301202774,
-0.05845458060503006,
-0.10950228571891785,
0.24114395678043365,
0.03693195804953575,
-0.02210855670273304,
0.03894197568297386,
-0.0664689913392067,
0.00470855413004756,
0.23232252895832062,
0.19555817544460297,
-0.0834326446056366,
-0.012748012319207191,
0.005029243417084217,
-0.011920949444174767,
-0.028262468054890633,
0.09249874949455261,
0.13674820959568024,
0.04225609451532364,
-0.09074192494153976,
-0.042047761380672455,
-0.0727899968624115,
-0.012567151337862015,
-0.037049513310194016,
0.06375797837972641,
0.04465434327721596,
0.005771932192146778,
-0.04135825112462044,
0.04752006754279137,
-0.06755976378917694,
-0.0883558914065361,
0.06430817395448685,
-0.20212537050247192,
-0.16304227709770203,
-0.006964839994907379,
0.09373234212398529,
0.005723511800169945,
0.06281958520412445,
-0.029416315257549286,
-0.0034009122755378485,
0.08030503243207932,
-0.01716633327305317,
-0.0925389900803566,
-0.0801924392580986,
0.10340769588947296,
-0.08788875490427017,
0.22933462262153625,
-0.043892644345760345,
0.07098235934972763,
0.1248423233628273,
0.06573887169361115,
-0.08509965240955353,
0.05806952342391014,
0.04861181601881981,
-0.04972453415393829,
0.022362101823091507,
0.07280808687210083,
-0.02689461037516594,
0.07473395764827728,
0.042557165026664734,
-0.13498808443546295,
0.009949230588972569,
-0.04935310035943985,
-0.05053258687257767,
-0.04572097584605217,
-0.029754679650068283,
-0.054324671626091,
0.13611038029193878,
0.21150311827659607,
-0.03477213904261589,
-0.013347029685974121,
-0.06998419016599655,
0.02596234157681465,
0.06159254536032677,
0.004541901871562004,
-0.06296718865633011,
-0.2194681167602539,
0.020278602838516235,
0.057225458323955536,
-0.02004455402493477,
-0.2154700756072998,
-0.10131493210792542,
0.003165888600051403,
-0.07604747265577316,
-0.0877133160829544,
0.06855418533086777,
0.08297023177146912,
0.0543673038482666,
-0.06316111981868744,
-0.025643769651651382,
-0.08014149218797684,
0.13839265704154968,
-0.12965811789035797,
-0.08774403482675552
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# vivos_prj1tha
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the vivos_dataset dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7737
- Wer: 0.5128
## Model description
More information needed
## Intended uses & limitations
More information needed
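Until this section is filled in, a minimal transcription sketch is given below. The Hub id `aiface/vivos_prj1tha` comes from this repository; the audio file name is an assumption, and decoding a file path this way relies on ffmpeg being available.
```python
from transformers import pipeline

# Load the fine-tuned checkpoint as an automatic-speech-recognition pipeline.
asr = pipeline("automatic-speech-recognition", model="aiface/vivos_prj1tha")

# Illustrative input: path to a mono speech recording; when decoding with ffmpeg,
# the pipeline resamples it to the sampling rate expected by the wav2vec2 feature extractor.
print(asr("sample.wav")["text"])
```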
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a `TrainingArguments` sketch of the same settings follows the list):
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
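A hedged sketch of this configuration expressed with `TrainingArguments` is shown below; the output directory name is an assumption and the original training script is not part of this card.
```python
from transformers import TrainingArguments

# Mirror of the hyperparameter list above (Adam betas and epsilon are the library defaults).
training_args = TrainingArguments(
    output_dir="vivos_prj1tha",            # assumed name
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=2,         # effective train batch size of 32
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=30,
    fp16=True,                             # mixed precision ("Native AMP")
)
```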
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 5.0541 | 10.25 | 400 | 1.0293 | 0.7051 |
| 0.5514 | 20.51 | 800 | 0.7737 | 0.5128 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["vivos_dataset"], "model-index": [{"name": "vivos_prj1tha", "results": []}]}
|
automatic-speech-recognition
|
aiface/vivos_prj1tha
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"dataset:vivos_dataset",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-vivos_dataset #license-apache-2.0 #endpoints_compatible #region-us
|
vivos\_prj1tha
==============
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the vivos\_dataset dataset.
It achieves the following results on the evaluation set:
* Loss: 0.7737
* Wer: 0.5128
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0003
* train\_batch\_size: 16
* eval\_batch\_size: 8
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 32
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 500
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.18.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-vivos_dataset #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
65,
158,
4,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-vivos_dataset #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.18.3\n* Tokenizers 0.10.3"
] |
[
-0.11628668010234833,
0.07660609483718872,
-0.0028862720355391502,
0.06176292523741722,
0.12809611856937408,
0.011977367103099823,
0.10501951724290848,
0.15239787101745605,
-0.0908098965883255,
0.0794660896062851,
0.10239481180906296,
0.0956323891878128,
0.062352072447538376,
0.11145596951246262,
-0.019892774522304535,
-0.315613716840744,
0.006286907009780407,
0.01862046681344509,
-0.11621979624032974,
0.11978521198034286,
0.10782769322395325,
-0.11809857189655304,
0.03453505411744118,
0.02418813481926918,
-0.12029695510864258,
0.009256538935005665,
-0.022943543270230293,
-0.06932210177183151,
0.12887537479400635,
0.02988753654062748,
0.10573695600032806,
0.014054151251912117,
0.09938539564609528,
-0.2571040987968445,
0.014805560000240803,
0.06405238807201385,
0.04176047444343567,
0.07796618342399597,
0.10673012584447861,
-0.02639247663319111,
0.1385377198457718,
-0.07963477820158005,
0.07902920246124268,
0.03995304927229881,
-0.11811505258083344,
-0.3257524073123932,
-0.08276672661304474,
0.032096631824970245,
0.11228683590888977,
0.1026497334241867,
-0.02880268543958664,
0.08005950599908829,
-0.07251852005720139,
0.0834386795759201,
0.22403310239315033,
-0.2493399679660797,
-0.08781274408102036,
-0.013091319240629673,
0.04513964429497719,
0.041431061923503876,
-0.134155735373497,
-0.03005998581647873,
0.02310989610850811,
0.043313492089509964,
0.1188068613409996,
0.009464167058467865,
-0.04581019654870033,
0.027299560606479645,
-0.137825146317482,
-0.053925540298223495,
0.12263248860836029,
0.07508411258459091,
-0.02653510309755802,
-0.08818360418081284,
-0.012443765066564083,
-0.19499920308589935,
-0.0484275184571743,
0.017304211854934692,
0.024374423548579216,
-0.042905014008283615,
-0.11723405122756958,
0.008794795721769333,
-0.09662403911352158,
-0.08769930154085159,
0.012605004012584686,
0.1300237625837326,
0.04706482216715813,
-0.01895919069647789,
-0.007256337907165289,
0.10897339880466461,
0.0430089607834816,
-0.1395234763622284,
0.005516663659363985,
0.0378882959485054,
-0.09764408320188522,
-0.023148950189352036,
-0.03891311213374138,
-0.04295078292489052,
-0.0011206992203369737,
0.11794956028461456,
-0.02112707495689392,
0.07689861208200455,
0.0192731861025095,
0.03149246424436569,
-0.08630353957414627,
0.17795473337173462,
-0.07361416518688202,
-0.007353967987000942,
-0.060201551765203476,
0.0939907431602478,
-0.028801551088690758,
-0.012323521077632904,
-0.06074700877070427,
0.03249196335673332,
0.10301240533590317,
0.04057910665869713,
-0.022389810532331467,
0.024151507765054703,
-0.056673888117074966,
-0.023918606340885162,
-0.04781610518693924,
-0.1000441163778305,
0.045810896903276443,
0.02541428618133068,
-0.09258730709552765,
0.008079145103693008,
0.00278493482619524,
0.02174820564687252,
-0.022093433886766434,
0.12914730608463287,
-0.07015976309776306,
0.014338159002363682,
-0.09774967283010483,
-0.10787547379732132,
0.030236443504691124,
-0.04041469097137451,
0.007658900693058968,
-0.06929821521043777,
-0.11504583805799484,
-0.04756396636366844,
0.07462036609649658,
-0.05356653034687042,
-0.057612765580415726,
-0.0640273317694664,
-0.0689677745103836,
0.04664810746908188,
-0.021700887009501457,
0.18745306134223938,
-0.05776531621813774,
0.11890724301338196,
0.015266425907611847,
0.04263187572360039,
0.03827816620469093,
0.07365697622299194,
-0.05785198509693146,
0.03309474512934685,
-0.11980584263801575,
0.0623592883348465,
-0.0892091616988182,
0.060999900102615356,
-0.15220706164836884,
-0.12694784998893738,
-0.018644362688064575,
-0.001756813726387918,
0.10138832777738571,
0.08743266761302948,
-0.18994823098182678,
-0.08292775601148605,
0.1734926700592041,
-0.06961696594953537,
-0.10757414251565933,
0.12450388818979263,
-0.035129111260175705,
0.013454253785312176,
0.03508789464831352,
0.1751500368118286,
0.06865314394235611,
-0.0948781967163086,
0.013823465444147587,
-0.05309898406267166,
0.11996707320213318,
0.016072792932391167,
0.10192447900772095,
-0.03395356982946396,
0.0397186353802681,
-0.004827001132071018,
-0.03341012820601463,
0.06482899934053421,
-0.09941399842500687,
-0.09570158272981644,
-0.02045234851539135,
-0.07646037638187408,
0.011000200174748898,
0.06630809605121613,
0.04174523428082466,
-0.09373928606510162,
-0.13016629219055176,
0.02247525192797184,
0.10296861827373505,
-0.10109047591686249,
0.03137243166565895,
-0.07977674156427383,
0.03927408531308174,
-0.01825895719230175,
-0.009132874198257923,
-0.16384102404117584,
-0.02516757883131504,
0.02324841357767582,
-0.03959443420171738,
0.02065240778028965,
-0.003126373514533043,
0.08945710211992264,
0.03960959613323212,
-0.04956340789794922,
-0.07165324687957764,
-0.08138716220855713,
-0.005169847514480352,
-0.07932722568511963,
-0.23154805600643158,
-0.08119553327560425,
-0.027467621490359306,
0.15145742893218994,
-0.23387104272842407,
0.011900035664439201,
0.017651651054620743,
0.10605153441429138,
0.03409533575177193,
-0.0467960424721241,
-0.017912594601511955,
0.07208621501922607,
-0.02139933779835701,
-0.06780215352773666,
0.036696963012218475,
-0.0020991761703044176,
-0.11760199069976807,
0.013419320806860924,
-0.10492785274982452,
0.09765984863042831,
0.10968896746635437,
-0.04020491987466812,
-0.08408518880605698,
-0.06145445629954338,
-0.07272497564554214,
-0.060429032891988754,
-0.042903054505586624,
0.007200535852462053,
0.18983404338359833,
0.038256675004959106,
0.11356930434703827,
-0.07156997919082642,
-0.043584760278463364,
0.030743779614567757,
-0.00295208184979856,
-0.013154623098671436,
0.14447489380836487,
0.07818424701690674,
-0.04249178618192673,
0.10132965445518494,
0.09390727430582047,
-0.080816350877285,
0.1265116184949875,
-0.06924814730882645,
-0.12843717634677887,
-0.017748242244124413,
0.022397169843316078,
0.03288665786385536,
0.13299040496349335,
-0.1420745998620987,
0.0014570915373042226,
0.024800173938274384,
0.02356991358101368,
0.024101585149765015,
-0.21860070526599884,
-0.021433759480714798,
0.041279446333646774,
-0.05948597565293312,
-0.04419203847646713,
-0.018534479662775993,
0.0038805874064564705,
0.09255852550268173,
0.006760738790035248,
-0.05636702477931976,
-0.008333469741046429,
-0.02236049994826317,
-0.07933969795703888,
0.1962806135416031,
-0.09248721599578857,
-0.13497696816921234,
-0.12843385338783264,
-0.04104870185256004,
-0.0037046847864985466,
-0.020127180963754654,
0.046510159969329834,
-0.11113626509904861,
-0.03956548124551773,
-0.0611138790845871,
0.050020791590213776,
-0.06449829787015915,
0.02489822916686535,
-0.0087654460221529,
0.015622615814208984,
0.07541549205780029,
-0.10023970156908035,
0.02260119467973709,
-0.024945374578237534,
-0.04411407187581062,
0.04222046583890915,
0.041038878262043,
0.10000995546579361,
0.15521809458732605,
0.026720033958554268,
0.027100849896669388,
-0.03875475749373436,
0.15749125182628632,
-0.09793954342603683,
-0.027878932654857635,
0.11645414680242538,
0.007931274361908436,
0.042688291519880295,
0.11711318045854568,
0.06599608063697815,
-0.08439190685749054,
0.02390175685286522,
0.05639374256134033,
-0.018307138234376907,
-0.24851134419441223,
-0.026321738958358765,
-0.058576326817274094,
-0.024106226861476898,
0.13488540053367615,
0.03264046087861061,
-0.009147807955741882,
0.0415102057158947,
-0.0006071026436984539,
0.0042318543419241905,
-0.00048754544695839286,
0.0682898759841919,
0.06639771908521652,
0.04072032868862152,
0.11486135423183441,
-0.017085840925574303,
-0.04838954284787178,
0.02508980967104435,
0.008135255426168442,
0.26754680275917053,
-0.007686758413910866,
0.15780745446681976,
0.05858780816197395,
0.1658485233783722,
-0.0009921514429152012,
0.07863864302635193,
0.007084154989570379,
-0.02847672440111637,
0.011146527715027332,
-0.05742403119802475,
-0.02200341783463955,
0.045735862106084824,
0.07318208366632462,
0.03760528936982155,
-0.12664583325386047,
-0.02575742080807686,
0.024799656122922897,
0.34176602959632874,
0.06196270510554314,
-0.3056641221046448,
-0.08784161508083344,
0.0023342857602983713,
-0.0756366178393364,
-0.05398339405655861,
0.0248249564319849,
0.10531506687402725,
-0.10242849588394165,
0.04017189145088196,
-0.08333948999643326,
0.10088976472616196,
-0.05367075279355049,
0.005409028381109238,
0.08958697319030762,
0.07043762505054474,
-0.0028616529889404774,
0.08113323897123337,
-0.28552278876304626,
0.31460970640182495,
-0.016845185309648514,
0.07686439156532288,
-0.05295070260763168,
0.024824749678373337,
0.02475990168750286,
-0.04805601015686989,
0.07729796320199966,
-0.014130808413028717,
-0.08564167469739914,
-0.18888522684574127,
-0.07644445449113846,
0.028751758858561516,
0.12425529956817627,
-0.059506043791770935,
0.12367202341556549,
-0.027360903099179268,
-0.0014710300602018833,
0.05959806963801384,
-0.06482715159654617,
-0.09175166487693787,
-0.11174656450748444,
0.01554486621171236,
0.015431173145771027,
0.07073229551315308,
-0.1063772663474083,
-0.11590196937322617,
-0.0666126012802124,
0.15130123496055603,
-0.05961798503994942,
-0.009652707725763321,
-0.13679175078868866,
0.10882708430290222,
0.16625256836414337,
-0.06585245579481125,
0.04526108130812645,
0.024549955502152443,
0.119631826877594,
0.02885841578245163,
-0.009423640556633472,
0.11102330684661865,
-0.08011016994714737,
-0.18181267380714417,
-0.05446520075201988,
0.1487891972064972,
0.04434184730052948,
0.0678117424249649,
-0.02321004495024681,
0.026032259687781334,
-0.03710251301527023,
-0.07688730210065842,
0.057641446590423584,
0.00008267266093753278,
0.011204872280359268,
0.046865545213222504,
-0.040817778557538986,
0.010320303030312061,
-0.07522667944431305,
-0.0648539587855339,
0.15750057995319366,
0.2690031826496124,
-0.077384352684021,
0.021124422550201416,
0.0287310853600502,
-0.04871976003050804,
-0.14773325622081757,
0.034228693693876266,
0.12043014913797379,
0.02876107767224312,
0.021667124703526497,
-0.23189841210842133,
0.0657002329826355,
0.08487144112586975,
-0.021863475441932678,
0.07494450360536575,
-0.33122608065605164,
-0.12581859529018402,
0.11362825334072113,
0.1261482983827591,
-0.03931895270943642,
-0.15687225759029388,
-0.05265791341662407,
-0.002983837155625224,
-0.09977207332849503,
0.08149488270282745,
-0.03421396762132645,
0.12200463563203812,
-0.009403099305927753,
0.08328762650489807,
0.024286221712827682,
-0.06015276536345482,
0.14920687675476074,
-0.007910758256912231,
0.06520052999258041,
-0.007914828136563301,
0.04999417066574097,
0.02547335997223854,
-0.04536915943026543,
0.009959572926163673,
-0.06650867313146591,
0.014292598702013493,
-0.14455243945121765,
-0.027219392359256744,
-0.08937475085258484,
0.029569175094366074,
-0.04505027085542679,
-0.05385373532772064,
-0.00943602342158556,
0.0524895004928112,
0.06373880058526993,
0.0006962937768548727,
0.12829852104187012,
-0.038301318883895874,
0.154245525598526,
0.05900581553578377,
0.08611422032117844,
0.002962918020784855,
-0.09900500625371933,
-0.013897654600441456,
-0.003299699630588293,
0.05273912847042084,
-0.1261214315891266,
0.025705281645059586,
0.1552269607782364,
0.04481987655162811,
0.15057097375392914,
0.06616434454917908,
-0.0815417692065239,
0.012763643637299538,
0.06987850368022919,
-0.08623601496219635,
-0.11593310534954071,
-0.010508691892027855,
0.07801292836666107,
-0.144977867603302,
0.0044820369221270084,
0.09792905300855637,
-0.05519716441631317,
-0.012368948198854923,
0.007676966954022646,
0.022699641063809395,
-0.05314026400446892,
0.22063495218753815,
0.03346656262874603,
0.07645775377750397,
-0.09342744946479797,
0.07551231235265732,
0.0636996328830719,
-0.17633168399333954,
0.023017453029751778,
0.0814182385802269,
-0.035377927124500275,
-0.022953355684876442,
0.03780203312635422,
0.08900445699691772,
-0.011697321198880672,
-0.05492455139756203,
-0.11365794390439987,
-0.148381307721138,
0.08122242987155914,
0.08233839273452759,
0.03598569706082344,
0.025474121794104576,
-0.04450393468141556,
0.03534775227308273,
-0.11547639220952988,
0.09203539788722992,
0.09289807826280594,
0.07169657200574875,
-0.1283409148454666,
0.14489659667015076,
0.010255892761051655,
0.01093893963843584,
0.007761612068861723,
-0.0016761041479185224,
-0.10245067626237869,
0.035099152475595474,
-0.10052856802940369,
-0.028997423127293587,
-0.052105776965618134,
-0.005408929660916328,
0.009654839523136616,
-0.05881986767053604,
-0.05028221383690834,
0.01419720146805048,
-0.12458266317844391,
-0.0443427674472332,
-0.00012144567881477997,
0.06923418492078781,
-0.11804383248090744,
-0.025695297867059708,
0.04401163011789322,
-0.10564345866441727,
0.09214046597480774,
0.05781160667538643,
0.027691002935171127,
0.04556657373905182,
-0.1227039247751236,
0.020412568002939224,
0.03250594809651375,
0.0005483790300786495,
0.036093492060899734,
-0.1571740061044693,
-0.008944588713347912,
-0.02521035633981228,
0.034282393753528595,
0.00019730876374524087,
0.02674306184053421,
-0.14054861664772034,
-0.03828682750463486,
-0.03128911554813385,
-0.07150563597679138,
-0.05453211069107056,
0.05622169375419617,
0.05116748809814453,
0.045184724032878876,
0.16901548206806183,
-0.08291488885879517,
0.053071144968271255,
-0.21979236602783203,
0.01605740189552307,
-0.03693050146102905,
-0.06367500871419907,
-0.0650276392698288,
-0.04011566936969757,
0.08267493546009064,
-0.07626272737979889,
0.08536586910486221,
-0.02286294661462307,
0.04794055223464966,
0.036068547517061234,
-0.10776781290769577,
0.021216655150055885,
0.04506046324968338,
0.22905217111110687,
0.041545845568180084,
-0.022848021239042282,
0.07132984697818756,
0.010391300544142723,
0.05372055619955063,
0.17059709131717682,
0.16043376922607422,
0.18440836668014526,
0.05096598342061043,
0.08417890965938568,
0.06590402126312256,
-0.11538631469011307,
-0.12039460241794586,
0.12090428918600082,
-0.01823275536298752,
0.12146256119012833,
-0.01798749715089798,
0.24749894440174103,
0.09603194892406464,
-0.19813676178455353,
0.052737221121788025,
-0.042296402156353,
-0.0888708159327507,
-0.08659440279006958,
-0.060094576328992844,
-0.0653156191110611,
-0.17194212973117828,
0.01215718686580658,
-0.11246706545352936,
0.05396947264671326,
0.05912154167890549,
0.03971957787871361,
0.01358743291348219,
0.13634787499904633,
0.050704918801784515,
0.0049888743087649345,
0.10385249555110931,
0.02042389288544655,
-0.012559759430587292,
-0.057569682598114014,
-0.08643071353435516,
0.02178066410124302,
-0.02905239723622799,
0.04413545876741409,
-0.0513870045542717,
-0.09550607204437256,
0.05983791872859001,
0.00873673614114523,
-0.10455934703350067,
0.017769338563084602,
-0.020618146285414696,
0.06915707141160965,
0.08509384840726852,
0.030626777559518814,
-0.0006730353343300521,
-0.028356000781059265,
0.2753808796405792,
-0.10188119113445282,
-0.05774208530783653,
-0.1319880187511444,
0.24147674441337585,
0.01863858290016651,
-0.025206178426742554,
0.017995314672589302,
-0.06952186673879623,
0.0002936204837169498,
0.170392706990242,
0.1391981691122055,
-0.033274874091148376,
-0.021267417818307877,
0.015377427451312542,
-0.0116307083517313,
-0.054587144404649734,
0.08650100976228714,
0.12818743288516998,
0.048851378262043,
-0.0878010243177414,
-0.035939183086156845,
-0.0590004026889801,
-0.0425783172249794,
-0.03148075193166733,
0.07504384219646454,
0.04118939861655235,
-0.012007822282612324,
-0.03121723234653473,
0.11900407820940018,
-0.07078564167022705,
-0.1091776192188263,
0.028388293460011482,
-0.18174061179161072,
-0.18906645476818085,
-0.04277648404240608,
0.07479751110076904,
0.025556620210409164,
0.058461420238018036,
-0.01300796028226614,
-0.015534614212810993,
0.09609545767307281,
0.010933063924312592,
-0.04321344941854477,
-0.11374679952859879,
0.10247194021940231,
-0.09781350940465927,
0.19906571507453918,
-0.0466105081140995,
0.019273893907666206,
0.12494844198226929,
0.06498944014310837,
-0.0755867063999176,
0.0458027720451355,
0.07207029312849045,
-0.11586485058069229,
0.04610356315970421,
0.18387214839458466,
-0.03293147683143616,
0.13257241249084473,
0.0398864671587944,
-0.12103519588708878,
0.025310155004262924,
-0.09006181359291077,
-0.04118140786886215,
-0.05375690013170242,
-0.011527571827173233,
-0.04082595929503441,
0.13087525963783264,
0.2247810959815979,
-0.06310980021953583,
-0.014954736456274986,
-0.06803775578737259,
0.012099499814212322,
0.04492849484086037,
0.11575006693601608,
-0.04682115092873573,
-0.2657455801963806,
0.009553414769470692,
0.002989665837958455,
0.009382023476064205,
-0.2586856782436371,
-0.09167927503585815,
0.04095565900206566,
-0.06380970031023026,
-0.05717044323682785,
0.11171051859855652,
0.08385166525840759,
0.05094791203737259,
-0.057317689061164856,
-0.07416354864835739,
-0.03884820640087128,
0.1835239976644516,
-0.16769619286060333,
-0.05476595461368561
] |
null | null |
transformers
|
# My Awesome Model
|
{"tags": ["conversational"]}
|
text-generation
|
aimiekhe/yummv1
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# My Awesome Model
|
[
"# My Awesome Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# My Awesome Model"
] |
[
51,
4
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# My Awesome Model"
] |
[
-0.05259015038609505,
0.05521034821867943,
-0.005910294596105814,
0.017722278833389282,
0.15250112116336823,
0.02286236733198166,
0.07657632976770401,
0.09513414651155472,
-0.025391526520252228,
-0.047348517924547195,
0.15119488537311554,
0.19781284034252167,
-0.020334534347057343,
0.101333387196064,
-0.04688440263271332,
-0.3143521845340729,
0.06439975649118423,
0.05463787540793419,
-0.015605635941028595,
0.12023304402828217,
0.09468326717615128,
-0.0530015267431736,
0.08742043375968933,
-0.012155864387750626,
-0.1293085366487503,
-0.0027921805158257484,
-0.002384399762377143,
-0.10180269181728363,
0.11194873601198196,
0.033712033182382584,
0.05166437849402428,
0.0182647667825222,
-0.05843055993318558,
-0.139859139919281,
0.03845210000872612,
-0.015005595050752163,
-0.05602653697133064,
0.05648263916373253,
0.059830192476511,
-0.07164353132247925,
0.1669619083404541,
0.13275989890098572,
-0.04237370565533638,
0.056127581745386124,
-0.17620700597763062,
0.017941240221261978,
0.01800798624753952,
0.019184142351150513,
0.05306641012430191,
0.10830496996641159,
-0.03932326287031174,
0.09217294305562973,
-0.11410652846097946,
0.08313368260860443,
0.07800983637571335,
-0.29151955246925354,
-0.025312699377536774,
0.10440942645072937,
0.06437138468027115,
0.048375632613897324,
-0.013386772945523262,
0.0621674507856369,
0.02149512618780136,
0.008602659218013287,
0.02225899137556553,
-0.06727100163698196,
-0.05789240449666977,
0.032748885452747345,
-0.0967593789100647,
-0.03634428232908249,
0.19753605127334595,
-0.024647634476423264,
0.053590498864650726,
-0.06265407055616379,
-0.11300963163375854,
-0.039751436561346054,
-0.050429005175828934,
-0.029761891812086105,
-0.05090925097465515,
0.09489558637142181,
0.004352911841124296,
-0.09534718841314316,
-0.13405443727970123,
-0.01370926946401596,
-0.1618979275226593,
0.15892250835895538,
0.012579603120684624,
0.046201955527067184,
-0.19210097193717957,
0.11465331166982651,
-0.03857925534248352,
-0.08259090781211853,
0.030513519421219826,
-0.12010065466165543,
0.03160654753446579,
-0.008132083341479301,
-0.019599268212914467,
-0.049325279891490936,
0.061037879437208176,
0.08101806789636612,
0.018783701583743095,
0.005755073390901089,
0.018167443573474884,
0.05343452841043472,
0.05891622602939606,
0.10033947974443436,
-0.02891627699136734,
-0.0625043511390686,
0.0025436533614993095,
-0.12051084637641907,
-0.01122665498405695,
-0.05357983708381653,
-0.18095199763774872,
0.002246231772005558,
0.02455340512096882,
0.05192234739661217,
0.011778532527387142,
0.09955989569425583,
-0.028496338054537773,
-0.026898741722106934,
0.06898727267980576,
0.002862759632989764,
-0.015707949176430702,
-0.005368964280933142,
-0.010934269987046719,
0.11485416442155838,
-0.023099146783351898,
0.04774846136569977,
-0.12022071331739426,
0.020393015816807747,
-0.07851235568523407,
-0.0019349842332303524,
-0.06214260309934616,
-0.04864754155278206,
-0.0019346009939908981,
-0.06985589861869812,
0.021118074655532837,
-0.14833110570907593,
-0.17990200221538544,
-0.005064866971224546,
0.021302316337823868,
-0.052403319627046585,
-0.09162671118974686,
-0.0982397273182869,
-0.02586611732840538,
0.03574685752391815,
-0.05873546749353409,
0.013170980848371983,
-0.06884536147117615,
0.06542801111936569,
0.0029820678755640984,
0.05682007595896721,
-0.14085575938224792,
0.08719147741794586,
-0.12582023441791534,
-0.023288866505026817,
-0.061977192759513855,
0.1109607070684433,
0.024780582636594772,
0.1267160177230835,
0.004311583004891872,
-0.0033308975398540497,
-0.08729329705238342,
0.08271238207817078,
-0.04243258014321327,
0.22770646214485168,
-0.10479787737131119,
-0.08809807151556015,
0.2632525563240051,
-0.05423165112733841,
-0.16432519257068634,
0.10179096460342407,
-0.014350244775414467,
0.12198644131422043,
0.13850919902324677,
0.16080057621002197,
0.007628654129803181,
0.03313867375254631,
0.10115300863981247,
0.08631709218025208,
-0.08573295921087265,
-0.0611947737634182,
0.023627014830708504,
-0.011463395319879055,
-0.10670105367898941,
0.046802595257759094,
0.04794782027602196,
0.08188598603010178,
-0.04982871189713478,
-0.028600862249732018,
-0.01972118206322193,
-0.044152840971946716,
0.05264130234718323,
0.007675500120967627,
0.13217447698116302,
-0.03674980252981186,
-0.03692879155278206,
-0.023745311424136162,
0.01699630729854107,
-0.03115241602063179,
0.007061392068862915,
-0.05687357112765312,
0.11091547459363937,
-0.03406180441379547,
0.051789235323667526,
-0.16953988373279572,
-0.04873261600732803,
-0.02087729424238205,
0.1402055323123932,
0.04973345249891281,
0.1329866498708725,
0.06287940591573715,
-0.010758201591670513,
0.00859389640390873,
0.007998145185410976,
0.13181665539741516,
0.007865442894399166,
-0.07660657912492752,
-0.047718439251184464,
0.09176599979400635,
-0.05973208695650101,
0.06147782504558563,
-0.098741315305233,
-0.004747362341731787,
-0.01433002483099699,
0.08674649894237518,
0.006352655589580536,
0.029382232576608658,
-0.006192679051309824,
0.003654100699350238,
-0.06161240115761757,
0.017873648554086685,
0.12492607533931732,
-0.01421504095196724,
-0.07439801841974258,
0.22084392607212067,
-0.15798072516918182,
0.18006981909275055,
0.18165533244609833,
-0.3081994652748108,
0.024602634832262993,
-0.08860466629266739,
-0.036338552832603455,
0.03426366671919823,
0.0491504967212677,
-0.034147560596466064,
0.16587987542152405,
-0.016766328364610672,
0.201018825173378,
-0.03547777235507965,
-0.01287798210978508,
-0.010399105958640575,
-0.03656993433833122,
-0.010632630437612534,
0.09065473079681396,
0.15122920274734497,
-0.1677125245332718,
0.18270380795001984,
0.1660280078649521,
0.06873020529747009,
0.17776396870613098,
0.034313347190618515,
-0.006856906693428755,
0.07112615555524826,
-0.022670727223157883,
-0.07675548642873764,
-0.049287427216768265,
-0.26302891969680786,
-0.027947327122092247,
0.06471601128578186,
0.04510856419801712,
0.11924877762794495,
-0.10971947014331818,
-0.037208184599876404,
0.010892451740801334,
-0.013165894895792007,
0.02132410928606987,
0.09682225435972214,
0.01171150617301464,
0.11804302036762238,
-0.021027036011219025,
-0.05209195241332054,
0.0898953229188919,
0.02727191150188446,
-0.0787680521607399,
0.19168277084827423,
-0.10074768215417862,
-0.3233809769153595,
-0.11354339867830276,
-0.18166927993297577,
-0.017843691632151604,
0.05878754332661629,
0.08049646019935608,
-0.09228580445051193,
-0.02625267766416073,
-0.01639235019683838,
0.0758359357714653,
-0.09145816415548325,
-0.015880629420280457,
-0.09367848187685013,
0.034986745566129684,
-0.10827737301588058,
-0.07011983543634415,
-0.05141967162489891,
-0.03368452936410904,
-0.04457031562924385,
0.13157756626605988,
-0.12242637574672699,
0.06396433711051941,
0.2076517641544342,
0.06227295100688934,
0.05622440204024315,
-0.0229496993124485,
0.23288212716579437,
-0.10842552781105042,
0.02383521944284439,
0.1717897206544876,
-0.03566030040383339,
0.0727933868765831,
0.13435456156730652,
0.006721907295286655,
-0.08144525438547134,
0.03465581312775612,
-0.04592517390847206,
-0.08630958944559097,
-0.20441576838493347,
-0.14156180620193481,
-0.12814727425575256,
0.07913564145565033,
0.03285396471619606,
0.05478321388363838,
0.15024253726005554,
0.11386489123106003,
0.007987297140061855,
0.00976672861725092,
-0.006888182368129492,
0.05438044294714928,
0.17482298612594604,
-0.05838097631931305,
0.10041683167219162,
-0.037591226398944855,
-0.1924494504928589,
0.08022978901863098,
0.04309763014316559,
0.08280511945486069,
0.07474655658006668,
0.0856199786067009,
0.013537914492189884,
0.03723837807774544,
0.10897084325551987,
0.1165735274553299,
0.031679023057222366,
-0.038079675287008286,
-0.04882059991359711,
-0.026300756260752678,
-0.03285675123333931,
0.05745977535843849,
0.07790146768093109,
-0.1608346849679947,
-0.06348084658384323,
-0.06350091099739075,
0.07662643492221832,
0.09017108380794525,
0.11811108142137527,
-0.21219493448734283,
0.01579318381845951,
0.092556893825531,
-0.0494147390127182,
-0.1304239183664322,
0.07402537018060684,
-0.00466050673276186,
-0.1397053301334381,
0.037663187831640244,
-0.014095795340836048,
0.1359514445066452,
-0.0778401643037796,
0.10336452722549438,
-0.08307972550392151,
-0.06147889420390129,
0.03632286190986633,
0.1355396956205368,
-0.30774354934692383,
0.2137020230293274,
-0.022472934797406197,
-0.05296783149242401,
-0.10508129745721817,
-0.011727629229426384,
0.020913105458021164,
0.09079049527645111,
0.10090240091085434,
-0.0025442070327699184,
0.0061299679800868034,
-0.0345483273267746,
-0.053218815475702286,
0.024456629529595375,
0.07957815378904343,
-0.08542889356613159,
0.0017540202243253589,
-0.02361489273607731,
-0.004407065454870462,
-0.032844748347997665,
-0.01189463958144188,
-0.011617658659815788,
-0.16786961257457733,
0.06556065380573273,
-0.002625665394589305,
0.11129079759120941,
0.03491498529911041,
0.0024013579823076725,
-0.1009332686662674,
0.19977013766765594,
0.01796281896531582,
-0.08052749931812286,
-0.08830537647008896,
-0.03254766762256622,
0.03660419583320618,
-0.06121435388922691,
0.027481911703944206,
-0.06916457414627075,
0.033381566405296326,
-0.06441576033830643,
-0.18325145542621613,
0.1268530637025833,
-0.10945470631122589,
-0.03609596937894821,
-0.04321056231856346,
0.18323224782943726,
-0.00929707009345293,
-0.0011623724130913615,
0.05866571143269539,
0.0032208464108407497,
-0.1347510665655136,
-0.10740556567907333,
0.020214511081576347,
-0.015275230631232262,
0.009142245166003704,
0.05559912323951721,
-0.009665844030678272,
0.00045268211397342384,
-0.039558928459882736,
-0.023234419524669647,
0.32348164916038513,
0.10732097923755646,
-0.04944206401705742,
0.17007054388523102,
0.13087597489356995,
-0.0827672928571701,
-0.30699312686920166,
-0.10971353948116302,
-0.10529600828886032,
-0.026918673887848854,
-0.037983208894729614,
-0.19617970287799835,
0.09504909813404083,
-0.03528566658496857,
-0.022136637941002846,
0.11253651231527328,
-0.2759084105491638,
-0.0770430713891983,
0.1826775223016739,
0.003314757253974676,
0.3998824954032898,
-0.10265109688043594,
-0.08777514100074768,
-0.06741699576377869,
-0.1120782196521759,
0.2033512443304062,
-0.05560711398720741,
0.08663415163755417,
-0.00517998356372118,
0.15513743460178375,
0.055607251822948456,
-0.02176513522863388,
0.08932057023048401,
-0.005811662413179874,
-0.0546204075217247,
-0.1219351515173912,
-0.03444604203104973,
-0.009159418754279613,
0.007239421829581261,
0.03589896112680435,
-0.04242607578635216,
0.01279151439666748,
-0.1399589478969574,
-0.045490626245737076,
-0.0764620453119278,
0.024699507281184196,
0.021008269861340523,
-0.0652410089969635,
-0.01643640361726284,
-0.03945036977529526,
-0.012804778292775154,
0.03164318576455116,
0.15236099064350128,
-0.06478006392717361,
0.1476556956768036,
0.04904455319046974,
0.15412139892578125,
-0.14745712280273438,
-0.02258288487792015,
-0.06896031647920609,
-0.05498642474412918,
0.04900865629315376,
-0.10053684562444687,
0.050061121582984924,
0.1202658861875534,
-0.0742902010679245,
0.0987328365445137,
0.0922594666481018,
-0.01938629150390625,
0.0012483424507081509,
0.1226617842912674,
-0.2489612102508545,
-0.07742628455162048,
-0.10509459674358368,
0.013337249867618084,
0.10138551890850067,
0.06995654851198196,
0.17304721474647522,
-0.0037713919300585985,
-0.036284226924180984,
-0.0064643872901797295,
0.025414984673261642,
-0.03540204465389252,
0.05724727362394333,
-0.002706433180719614,
0.016663886606693268,
-0.15213344991207123,
0.060368724167346954,
-0.00024176653823815286,
-0.1438901126384735,
-0.013603870756924152,
0.16073721647262573,
-0.11208858340978622,
-0.15145981311798096,
-0.007263668347150087,
0.13685113191604614,
-0.13171035051345825,
-0.03302847594022751,
-0.03708777576684952,
-0.170182466506958,
0.07439173012971878,
0.1024777740240097,
0.08549231290817261,
0.08025266975164413,
-0.06620611250400543,
-0.00807863101363182,
-0.011656313203275204,
-0.026087598875164986,
0.031810320913791656,
-0.023377234116196632,
-0.09044221043586731,
0.03872343525290489,
-0.026654237881302834,
0.13591371476650238,
-0.09607382118701935,
-0.09331836551427841,
-0.135749951004982,
0.039314381778240204,
-0.12405620515346527,
-0.08138058334589005,
-0.12200927734375,
-0.0591500885784626,
0.00224387738853693,
-0.0001289021165575832,
-0.035674065351486206,
-0.06687422841787338,
-0.13582271337509155,
0.04366770386695862,
-0.04484611004590988,
0.0013091047294437885,
-0.040241483598947525,
0.04561002552509308,
0.06766383349895477,
-0.03493715822696686,
0.13722217082977295,
0.11722734570503235,
-0.07864081114530563,
0.08946478366851807,
-0.16657429933547974,
-0.0683990865945816,
0.08854512125253677,
0.008173754438757896,
0.06165994703769684,
0.06743349134922028,
0.033807408064603806,
0.06109451875090599,
0.04151686280965805,
0.03488299250602722,
0.01739438995718956,
-0.09271225333213806,
0.015541021712124348,
0.022296719253063202,
-0.1294609159231186,
-0.04801803454756737,
-0.029226921498775482,
0.00939185917377472,
0.008117396384477615,
0.11003357172012329,
-0.0426274873316288,
0.09439733624458313,
-0.05888751894235611,
0.036728594452142715,
0.016222506761550903,
-0.16461637616157532,
-0.020102784037590027,
-0.11915475130081177,
0.028684545308351517,
-0.0033096212428063154,
0.25625869631767273,
0.06346847862005234,
0.020517030730843544,
0.01250078622251749,
0.08567021042108536,
0.07241600006818771,
0.02562166005373001,
0.1956365555524826,
0.10854171961545944,
-0.05020022392272949,
-0.12334850430488586,
0.09686340391635895,
0.034720368683338165,
0.06432123482227325,
0.13385434448719025,
-0.026959087699651718,
0.002498799469321966,
0.11019360274076462,
0.011678861454129219,
0.04961980879306793,
-0.09859088063240051,
-0.16400282084941864,
-0.00994415208697319,
0.061864156275987625,
-0.04559077322483063,
0.12240655720233917,
0.11382720619440079,
-0.020697353407740593,
0.03180128335952759,
-0.010503606870770454,
-0.05694027617573738,
-0.16998925805091858,
-0.1630837321281433,
-0.08357038348913193,
-0.11794789135456085,
-0.0027763545513153076,
-0.11386270076036453,
0.013879159465432167,
0.06452289968729019,
0.0604364387691021,
-0.09019444137811661,
0.08891061693429947,
0.0687386617064476,
-0.11843101680278778,
0.08828350901603699,
-0.033263903111219406,
0.07249268144369125,
0.0015160300536081195,
0.003872724948450923,
-0.13800905644893646,
0.032393742352724075,
-0.008493867702782154,
0.04159298539161682,
-0.09244006127119064,
0.022458361461758614,
-0.11297028511762619,
-0.07659684121608734,
-0.07971972227096558,
0.05093973129987717,
-0.03541257977485657,
0.1390930563211441,
0.001295371213927865,
-0.035233911126852036,
0.024190181866288185,
0.22729112207889557,
-0.06350252777338028,
-0.030667411163449287,
-0.0618741400539875,
0.21414142847061157,
0.024466563016176224,
0.10703565180301666,
-0.016775688156485558,
0.019240234047174454,
-0.0764411985874176,
0.3689337372779846,
0.344390869140625,
-0.1225387305021286,
-0.0015968306688591838,
0.031062176451086998,
0.036916591227054596,
0.11621878296136856,
0.12602226436138153,
0.057955991476774216,
0.2995031177997589,
-0.08396036922931671,
-0.002026971662417054,
-0.02688612788915634,
-0.03624163940548897,
-0.04409930482506752,
0.10547586530447006,
0.06835740804672241,
-0.03330419585108757,
-0.027012333273887634,
0.1376710683107376,
-0.2966996431350708,
0.12323499470949173,
-0.15714547038078308,
-0.1487535685300827,
-0.06873904913663864,
-0.005042468197643757,
0.08589684963226318,
0.04748665541410446,
0.1069009080529213,
-0.019124338403344154,
-0.08203735202550888,
0.05766449123620987,
0.0320524163544178,
-0.22844897210597992,
0.011852608993649483,
0.08361081779003143,
-0.06153005734086037,
0.011767351068556309,
-0.017906347289681435,
0.038472190499305725,
0.07790610194206238,
0.025976579636335373,
-0.032770540565252304,
0.06325861811637878,
-0.005814229138195515,
-0.05033424496650696,
0.04302205145359039,
0.05059972032904625,
0.017107632011175156,
-0.1511564701795578,
0.07320158183574677,
-0.1762860119342804,
0.0566408596932888,
-0.005331212189048529,
-0.04948166385293007,
0.000018263708625454456,
0.01998119056224823,
-0.06808236241340637,
0.05880929157137871,
0.0952666699886322,
-0.012173139490187168,
-0.002317852806299925,
-0.056667573750019073,
0.007662574760615826,
-0.0679154172539711,
-0.0747012197971344,
-0.10497893393039703,
-0.1338900774717331,
-0.11392296850681305,
0.10846775025129318,
-0.011928223073482513,
-0.19833622872829437,
0.02906924858689308,
-0.11258108913898468,
0.04933213070034981,
-0.13360801339149475,
0.08599711954593658,
0.1282832771539688,
0.021543797105550766,
-0.01265349704772234,
0.04020093381404877,
0.01591683179140091,
0.08550478518009186,
-0.09200563281774521,
-0.10515180230140686
] |
null | null |
transformers
|
# My Awesome Model
|
{"tags": ["conversational"]}
|
text-generation
|
aimiekhe/yummv2
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# My Awesome Model
|
[
"# My Awesome Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# My Awesome Model"
] |
[
51,
4
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# My Awesome Model"
] |
[
-0.05259015038609505,
0.05521034821867943,
-0.005910294596105814,
0.017722278833389282,
0.15250112116336823,
0.02286236733198166,
0.07657632976770401,
0.09513414651155472,
-0.025391526520252228,
-0.047348517924547195,
0.15119488537311554,
0.19781284034252167,
-0.020334534347057343,
0.101333387196064,
-0.04688440263271332,
-0.3143521845340729,
0.06439975649118423,
0.05463787540793419,
-0.015605635941028595,
0.12023304402828217,
0.09468326717615128,
-0.0530015267431736,
0.08742043375968933,
-0.012155864387750626,
-0.1293085366487503,
-0.0027921805158257484,
-0.002384399762377143,
-0.10180269181728363,
0.11194873601198196,
0.033712033182382584,
0.05166437849402428,
0.0182647667825222,
-0.05843055993318558,
-0.139859139919281,
0.03845210000872612,
-0.015005595050752163,
-0.05602653697133064,
0.05648263916373253,
0.059830192476511,
-0.07164353132247925,
0.1669619083404541,
0.13275989890098572,
-0.04237370565533638,
0.056127581745386124,
-0.17620700597763062,
0.017941240221261978,
0.01800798624753952,
0.019184142351150513,
0.05306641012430191,
0.10830496996641159,
-0.03932326287031174,
0.09217294305562973,
-0.11410652846097946,
0.08313368260860443,
0.07800983637571335,
-0.29151955246925354,
-0.025312699377536774,
0.10440942645072937,
0.06437138468027115,
0.048375632613897324,
-0.013386772945523262,
0.0621674507856369,
0.02149512618780136,
0.008602659218013287,
0.02225899137556553,
-0.06727100163698196,
-0.05789240449666977,
0.032748885452747345,
-0.0967593789100647,
-0.03634428232908249,
0.19753605127334595,
-0.024647634476423264,
0.053590498864650726,
-0.06265407055616379,
-0.11300963163375854,
-0.039751436561346054,
-0.050429005175828934,
-0.029761891812086105,
-0.05090925097465515,
0.09489558637142181,
0.004352911841124296,
-0.09534718841314316,
-0.13405443727970123,
-0.01370926946401596,
-0.1618979275226593,
0.15892250835895538,
0.012579603120684624,
0.046201955527067184,
-0.19210097193717957,
0.11465331166982651,
-0.03857925534248352,
-0.08259090781211853,
0.030513519421219826,
-0.12010065466165543,
0.03160654753446579,
-0.008132083341479301,
-0.019599268212914467,
-0.049325279891490936,
0.061037879437208176,
0.08101806789636612,
0.018783701583743095,
0.005755073390901089,
0.018167443573474884,
0.05343452841043472,
0.05891622602939606,
0.10033947974443436,
-0.02891627699136734,
-0.0625043511390686,
0.0025436533614993095,
-0.12051084637641907,
-0.01122665498405695,
-0.05357983708381653,
-0.18095199763774872,
0.002246231772005558,
0.02455340512096882,
0.05192234739661217,
0.011778532527387142,
0.09955989569425583,
-0.028496338054537773,
-0.026898741722106934,
0.06898727267980576,
0.002862759632989764,
-0.015707949176430702,
-0.005368964280933142,
-0.010934269987046719,
0.11485416442155838,
-0.023099146783351898,
0.04774846136569977,
-0.12022071331739426,
0.020393015816807747,
-0.07851235568523407,
-0.0019349842332303524,
-0.06214260309934616,
-0.04864754155278206,
-0.0019346009939908981,
-0.06985589861869812,
0.021118074655532837,
-0.14833110570907593,
-0.17990200221538544,
-0.005064866971224546,
0.021302316337823868,
-0.052403319627046585,
-0.09162671118974686,
-0.0982397273182869,
-0.02586611732840538,
0.03574685752391815,
-0.05873546749353409,
0.013170980848371983,
-0.06884536147117615,
0.06542801111936569,
0.0029820678755640984,
0.05682007595896721,
-0.14085575938224792,
0.08719147741794586,
-0.12582023441791534,
-0.023288866505026817,
-0.061977192759513855,
0.1109607070684433,
0.024780582636594772,
0.1267160177230835,
0.004311583004891872,
-0.0033308975398540497,
-0.08729329705238342,
0.08271238207817078,
-0.04243258014321327,
0.22770646214485168,
-0.10479787737131119,
-0.08809807151556015,
0.2632525563240051,
-0.05423165112733841,
-0.16432519257068634,
0.10179096460342407,
-0.014350244775414467,
0.12198644131422043,
0.13850919902324677,
0.16080057621002197,
0.007628654129803181,
0.03313867375254631,
0.10115300863981247,
0.08631709218025208,
-0.08573295921087265,
-0.0611947737634182,
0.023627014830708504,
-0.011463395319879055,
-0.10670105367898941,
0.046802595257759094,
0.04794782027602196,
0.08188598603010178,
-0.04982871189713478,
-0.028600862249732018,
-0.01972118206322193,
-0.044152840971946716,
0.05264130234718323,
0.007675500120967627,
0.13217447698116302,
-0.03674980252981186,
-0.03692879155278206,
-0.023745311424136162,
0.01699630729854107,
-0.03115241602063179,
0.007061392068862915,
-0.05687357112765312,
0.11091547459363937,
-0.03406180441379547,
0.051789235323667526,
-0.16953988373279572,
-0.04873261600732803,
-0.02087729424238205,
0.1402055323123932,
0.04973345249891281,
0.1329866498708725,
0.06287940591573715,
-0.010758201591670513,
0.00859389640390873,
0.007998145185410976,
0.13181665539741516,
0.007865442894399166,
-0.07660657912492752,
-0.047718439251184464,
0.09176599979400635,
-0.05973208695650101,
0.06147782504558563,
-0.098741315305233,
-0.004747362341731787,
-0.01433002483099699,
0.08674649894237518,
0.006352655589580536,
0.029382232576608658,
-0.006192679051309824,
0.003654100699350238,
-0.06161240115761757,
0.017873648554086685,
0.12492607533931732,
-0.01421504095196724,
-0.07439801841974258,
0.22084392607212067,
-0.15798072516918182,
0.18006981909275055,
0.18165533244609833,
-0.3081994652748108,
0.024602634832262993,
-0.08860466629266739,
-0.036338552832603455,
0.03426366671919823,
0.0491504967212677,
-0.034147560596466064,
0.16587987542152405,
-0.016766328364610672,
0.201018825173378,
-0.03547777235507965,
-0.01287798210978508,
-0.010399105958640575,
-0.03656993433833122,
-0.010632630437612534,
0.09065473079681396,
0.15122920274734497,
-0.1677125245332718,
0.18270380795001984,
0.1660280078649521,
0.06873020529747009,
0.17776396870613098,
0.034313347190618515,
-0.006856906693428755,
0.07112615555524826,
-0.022670727223157883,
-0.07675548642873764,
-0.049287427216768265,
-0.26302891969680786,
-0.027947327122092247,
0.06471601128578186,
0.04510856419801712,
0.11924877762794495,
-0.10971947014331818,
-0.037208184599876404,
0.010892451740801334,
-0.013165894895792007,
0.02132410928606987,
0.09682225435972214,
0.01171150617301464,
0.11804302036762238,
-0.021027036011219025,
-0.05209195241332054,
0.0898953229188919,
0.02727191150188446,
-0.0787680521607399,
0.19168277084827423,
-0.10074768215417862,
-0.3233809769153595,
-0.11354339867830276,
-0.18166927993297577,
-0.017843691632151604,
0.05878754332661629,
0.08049646019935608,
-0.09228580445051193,
-0.02625267766416073,
-0.01639235019683838,
0.0758359357714653,
-0.09145816415548325,
-0.015880629420280457,
-0.09367848187685013,
0.034986745566129684,
-0.10827737301588058,
-0.07011983543634415,
-0.05141967162489891,
-0.03368452936410904,
-0.04457031562924385,
0.13157756626605988,
-0.12242637574672699,
0.06396433711051941,
0.2076517641544342,
0.06227295100688934,
0.05622440204024315,
-0.0229496993124485,
0.23288212716579437,
-0.10842552781105042,
0.02383521944284439,
0.1717897206544876,
-0.03566030040383339,
0.0727933868765831,
0.13435456156730652,
0.006721907295286655,
-0.08144525438547134,
0.03465581312775612,
-0.04592517390847206,
-0.08630958944559097,
-0.20441576838493347,
-0.14156180620193481,
-0.12814727425575256,
0.07913564145565033,
0.03285396471619606,
0.05478321388363838,
0.15024253726005554,
0.11386489123106003,
0.007987297140061855,
0.00976672861725092,
-0.006888182368129492,
0.05438044294714928,
0.17482298612594604,
-0.05838097631931305,
0.10041683167219162,
-0.037591226398944855,
-0.1924494504928589,
0.08022978901863098,
0.04309763014316559,
0.08280511945486069,
0.07474655658006668,
0.0856199786067009,
0.013537914492189884,
0.03723837807774544,
0.10897084325551987,
0.1165735274553299,
0.031679023057222366,
-0.038079675287008286,
-0.04882059991359711,
-0.026300756260752678,
-0.03285675123333931,
0.05745977535843849,
0.07790146768093109,
-0.1608346849679947,
-0.06348084658384323,
-0.06350091099739075,
0.07662643492221832,
0.09017108380794525,
0.11811108142137527,
-0.21219493448734283,
0.01579318381845951,
0.092556893825531,
-0.0494147390127182,
-0.1304239183664322,
0.07402537018060684,
-0.00466050673276186,
-0.1397053301334381,
0.037663187831640244,
-0.014095795340836048,
0.1359514445066452,
-0.0778401643037796,
0.10336452722549438,
-0.08307972550392151,
-0.06147889420390129,
0.03632286190986633,
0.1355396956205368,
-0.30774354934692383,
0.2137020230293274,
-0.022472934797406197,
-0.05296783149242401,
-0.10508129745721817,
-0.011727629229426384,
0.020913105458021164,
0.09079049527645111,
0.10090240091085434,
-0.0025442070327699184,
0.0061299679800868034,
-0.0345483273267746,
-0.053218815475702286,
0.024456629529595375,
0.07957815378904343,
-0.08542889356613159,
0.0017540202243253589,
-0.02361489273607731,
-0.004407065454870462,
-0.032844748347997665,
-0.01189463958144188,
-0.011617658659815788,
-0.16786961257457733,
0.06556065380573273,
-0.002625665394589305,
0.11129079759120941,
0.03491498529911041,
0.0024013579823076725,
-0.1009332686662674,
0.19977013766765594,
0.01796281896531582,
-0.08052749931812286,
-0.08830537647008896,
-0.03254766762256622,
0.03660419583320618,
-0.06121435388922691,
0.027481911703944206,
-0.06916457414627075,
0.033381566405296326,
-0.06441576033830643,
-0.18325145542621613,
0.1268530637025833,
-0.10945470631122589,
-0.03609596937894821,
-0.04321056231856346,
0.18323224782943726,
-0.00929707009345293,
-0.0011623724130913615,
0.05866571143269539,
0.0032208464108407497,
-0.1347510665655136,
-0.10740556567907333,
0.020214511081576347,
-0.015275230631232262,
0.009142245166003704,
0.05559912323951721,
-0.009665844030678272,
0.00045268211397342384,
-0.039558928459882736,
-0.023234419524669647,
0.32348164916038513,
0.10732097923755646,
-0.04944206401705742,
0.17007054388523102,
0.13087597489356995,
-0.0827672928571701,
-0.30699312686920166,
-0.10971353948116302,
-0.10529600828886032,
-0.026918673887848854,
-0.037983208894729614,
-0.19617970287799835,
0.09504909813404083,
-0.03528566658496857,
-0.022136637941002846,
0.11253651231527328,
-0.2759084105491638,
-0.0770430713891983,
0.1826775223016739,
0.003314757253974676,
0.3998824954032898,
-0.10265109688043594,
-0.08777514100074768,
-0.06741699576377869,
-0.1120782196521759,
0.2033512443304062,
-0.05560711398720741,
0.08663415163755417,
-0.00517998356372118,
0.15513743460178375,
0.055607251822948456,
-0.02176513522863388,
0.08932057023048401,
-0.005811662413179874,
-0.0546204075217247,
-0.1219351515173912,
-0.03444604203104973,
-0.009159418754279613,
0.007239421829581261,
0.03589896112680435,
-0.04242607578635216,
0.01279151439666748,
-0.1399589478969574,
-0.045490626245737076,
-0.0764620453119278,
0.024699507281184196,
0.021008269861340523,
-0.0652410089969635,
-0.01643640361726284,
-0.03945036977529526,
-0.012804778292775154,
0.03164318576455116,
0.15236099064350128,
-0.06478006392717361,
0.1476556956768036,
0.04904455319046974,
0.15412139892578125,
-0.14745712280273438,
-0.02258288487792015,
-0.06896031647920609,
-0.05498642474412918,
0.04900865629315376,
-0.10053684562444687,
0.050061121582984924,
0.1202658861875534,
-0.0742902010679245,
0.0987328365445137,
0.0922594666481018,
-0.01938629150390625,
0.0012483424507081509,
0.1226617842912674,
-0.2489612102508545,
-0.07742628455162048,
-0.10509459674358368,
0.013337249867618084,
0.10138551890850067,
0.06995654851198196,
0.17304721474647522,
-0.0037713919300585985,
-0.036284226924180984,
-0.0064643872901797295,
0.025414984673261642,
-0.03540204465389252,
0.05724727362394333,
-0.002706433180719614,
0.016663886606693268,
-0.15213344991207123,
0.060368724167346954,
-0.00024176653823815286,
-0.1438901126384735,
-0.013603870756924152,
0.16073721647262573,
-0.11208858340978622,
-0.15145981311798096,
-0.007263668347150087,
0.13685113191604614,
-0.13171035051345825,
-0.03302847594022751,
-0.03708777576684952,
-0.170182466506958,
0.07439173012971878,
0.1024777740240097,
0.08549231290817261,
0.08025266975164413,
-0.06620611250400543,
-0.00807863101363182,
-0.011656313203275204,
-0.026087598875164986,
0.031810320913791656,
-0.023377234116196632,
-0.09044221043586731,
0.03872343525290489,
-0.026654237881302834,
0.13591371476650238,
-0.09607382118701935,
-0.09331836551427841,
-0.135749951004982,
0.039314381778240204,
-0.12405620515346527,
-0.08138058334589005,
-0.12200927734375,
-0.0591500885784626,
0.00224387738853693,
-0.0001289021165575832,
-0.035674065351486206,
-0.06687422841787338,
-0.13582271337509155,
0.04366770386695862,
-0.04484611004590988,
0.0013091047294437885,
-0.040241483598947525,
0.04561002552509308,
0.06766383349895477,
-0.03493715822696686,
0.13722217082977295,
0.11722734570503235,
-0.07864081114530563,
0.08946478366851807,
-0.16657429933547974,
-0.0683990865945816,
0.08854512125253677,
0.008173754438757896,
0.06165994703769684,
0.06743349134922028,
0.033807408064603806,
0.06109451875090599,
0.04151686280965805,
0.03488299250602722,
0.01739438995718956,
-0.09271225333213806,
0.015541021712124348,
0.022296719253063202,
-0.1294609159231186,
-0.04801803454756737,
-0.029226921498775482,
0.00939185917377472,
0.008117396384477615,
0.11003357172012329,
-0.0426274873316288,
0.09439733624458313,
-0.05888751894235611,
0.036728594452142715,
0.016222506761550903,
-0.16461637616157532,
-0.020102784037590027,
-0.11915475130081177,
0.028684545308351517,
-0.0033096212428063154,
0.25625869631767273,
0.06346847862005234,
0.020517030730843544,
0.01250078622251749,
0.08567021042108536,
0.07241600006818771,
0.02562166005373001,
0.1956365555524826,
0.10854171961545944,
-0.05020022392272949,
-0.12334850430488586,
0.09686340391635895,
0.034720368683338165,
0.06432123482227325,
0.13385434448719025,
-0.026959087699651718,
0.002498799469321966,
0.11019360274076462,
0.011678861454129219,
0.04961980879306793,
-0.09859088063240051,
-0.16400282084941864,
-0.00994415208697319,
0.061864156275987625,
-0.04559077322483063,
0.12240655720233917,
0.11382720619440079,
-0.020697353407740593,
0.03180128335952759,
-0.010503606870770454,
-0.05694027617573738,
-0.16998925805091858,
-0.1630837321281433,
-0.08357038348913193,
-0.11794789135456085,
-0.0027763545513153076,
-0.11386270076036453,
0.013879159465432167,
0.06452289968729019,
0.0604364387691021,
-0.09019444137811661,
0.08891061693429947,
0.0687386617064476,
-0.11843101680278778,
0.08828350901603699,
-0.033263903111219406,
0.07249268144369125,
0.0015160300536081195,
0.003872724948450923,
-0.13800905644893646,
0.032393742352724075,
-0.008493867702782154,
0.04159298539161682,
-0.09244006127119064,
0.022458361461758614,
-0.11297028511762619,
-0.07659684121608734,
-0.07971972227096558,
0.05093973129987717,
-0.03541257977485657,
0.1390930563211441,
0.001295371213927865,
-0.035233911126852036,
0.024190181866288185,
0.22729112207889557,
-0.06350252777338028,
-0.030667411163449287,
-0.0618741400539875,
0.21414142847061157,
0.024466563016176224,
0.10703565180301666,
-0.016775688156485558,
0.019240234047174454,
-0.0764411985874176,
0.3689337372779846,
0.344390869140625,
-0.1225387305021286,
-0.0015968306688591838,
0.031062176451086998,
0.036916591227054596,
0.11621878296136856,
0.12602226436138153,
0.057955991476774216,
0.2995031177997589,
-0.08396036922931671,
-0.002026971662417054,
-0.02688612788915634,
-0.03624163940548897,
-0.04409930482506752,
0.10547586530447006,
0.06835740804672241,
-0.03330419585108757,
-0.027012333273887634,
0.1376710683107376,
-0.2966996431350708,
0.12323499470949173,
-0.15714547038078308,
-0.1487535685300827,
-0.06873904913663864,
-0.005042468197643757,
0.08589684963226318,
0.04748665541410446,
0.1069009080529213,
-0.019124338403344154,
-0.08203735202550888,
0.05766449123620987,
0.0320524163544178,
-0.22844897210597992,
0.011852608993649483,
0.08361081779003143,
-0.06153005734086037,
0.011767351068556309,
-0.017906347289681435,
0.038472190499305725,
0.07790610194206238,
0.025976579636335373,
-0.032770540565252304,
0.06325861811637878,
-0.005814229138195515,
-0.05033424496650696,
0.04302205145359039,
0.05059972032904625,
0.017107632011175156,
-0.1511564701795578,
0.07320158183574677,
-0.1762860119342804,
0.0566408596932888,
-0.005331212189048529,
-0.04948166385293007,
0.000018263708625454456,
0.01998119056224823,
-0.06808236241340637,
0.05880929157137871,
0.0952666699886322,
-0.012173139490187168,
-0.002317852806299925,
-0.056667573750019073,
0.007662574760615826,
-0.0679154172539711,
-0.0747012197971344,
-0.10497893393039703,
-0.1338900774717331,
-0.11392296850681305,
0.10846775025129318,
-0.011928223073482513,
-0.19833622872829437,
0.02906924858689308,
-0.11258108913898468,
0.04933213070034981,
-0.13360801339149475,
0.08599711954593658,
0.1282832771539688,
0.021543797105550766,
-0.01265349704772234,
0.04020093381404877,
0.01591683179140091,
0.08550478518009186,
-0.09200563281774521,
-0.10515180230140686
] |
null | null |
transformers
|
# BART base model fine-tuned on CNN Dailymail
- This model is a [bart-base model](https://huggingface.co/facebook/bart-base) fine-tuned on the [CNN/Dailymail summarization dataset](https://huggingface.co/datasets/cnn_dailymail) using [Ainize Teachable-NLP](https://ainize.ai/teachable-nlp).
The Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,
Bart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).
The pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.
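For intuition, the sketch below illustrates those two corruptions (sentence permutation and text infilling). It is a toy illustration only, not the authors' fairseq implementation: span lengths are fixed here, whereas the paper samples them from a Poisson distribution.
```python
import random

def sentence_permutation(sentences):
    # Corruption 1: shuffle the order of the original sentences.
    shuffled = sentences[:]
    random.shuffle(shuffled)
    return shuffled

def text_infilling(tokens, mask_token="<mask>", span_len=3, n_spans=2):
    # Corruption 2 (simplified): replace whole token spans with a single mask token.
    tokens = tokens[:]
    for _ in range(n_spans):
        if len(tokens) <= span_len:
            break
        start = random.randrange(0, len(tokens) - span_len)
        tokens[start:start + span_len] = [mask_token]
    return tokens

print(sentence_permutation(["One sentence.", "Another sentence.", "A third."]))
print(text_infilling("the quick brown fox jumps over the lazy dog".split()))
```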
BART is particularly effective when fine tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.
The Authors’ code can be found here:
https://github.com/pytorch/fairseq/tree/master/examples/bart
## Usage
### Python Code
```python
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
# Load Model and Tokenizer
tokenizer = PreTrainedTokenizerFast.from_pretrained("ainize/bart-base-cnn")
model = BartForConditionalGeneration.from_pretrained("ainize/bart-base-cnn")
# Encode Input Text
input_text = '(CNN) -- South Korea launched an investigation Tuesday into reports of toxic chemicals being dumped at a former U.S. military base, the Defense Ministry said. The tests follow allegations of American soldiers burying chemicals on Korean soil. The first tests are being carried out by a joint military, government and civilian task force at the site of what was Camp Mercer, west of Seoul. "Soil and underground water will be taken in the areas where toxic chemicals were allegedly buried," said the statement from the South Korean Defense Ministry. Once testing is finished, the government will decide on how to test more than 80 other sites -- all former bases. The alarm was raised this month when a U.S. veteran alleged barrels of the toxic herbicide Agent Orange were buried at an American base in South Korea in the late 1970s. Two of his fellow soldiers corroborated his story about Camp Carroll, about 185 miles (300 kilometers) southeast of the capital, Seoul. "We\'ve been working very closely with the Korean government since we had the initial claims," said Lt. Gen. John Johnson, who is heading the Camp Carroll Task Force. "If we get evidence that there is a risk to health, we are going to fix it." A joint U.S.- South Korean investigation is being conducted at Camp Carroll to test the validity of allegations. The U.S. military sprayed Agent Orange from planes onto jungles in Vietnam to kill vegetation in an effort to expose guerrilla fighters. Exposure to the chemical has been blamed for a wide variety of ailments, including certain forms of cancer and nerve disorders. It has also been linked to birth defects, according to the Department of Veterans Affairs. Journalist Yoonjung Seo contributed to this report.'
input_ids = tokenizer.encode(input_text, return_tensors="pt")
# Generate Summary Text Ids
summary_text_ids = model.generate(
    input_ids=input_ids,
    bos_token_id=model.config.bos_token_id,
    eos_token_id=model.config.eos_token_id,
    length_penalty=2.0,
    max_length=142,
    min_length=56,
    num_beams=4,
)
# Decoding Text
print(tokenizer.decode(summary_text_ids[0], skip_special_tokens=True))
```
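A minimal batched variant of the snippet above is sketched below. It is illustrative rather than part of the original card: the `articles` list is placeholder input, and it assumes the hosted tokenizer provides BART's standard padding token and the usual 1024-token encoder limit. Generation settings mirror the single-example snippet.
```python
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration

# Load model and tokenizer once, then summarize several articles in one generate() call.
tokenizer = PreTrainedTokenizerFast.from_pretrained("ainize/bart-base-cnn")
model = BartForConditionalGeneration.from_pretrained("ainize/bart-base-cnn")

articles = [
    "First news article to summarize ...",
    "Second news article to summarize ...",
]

# Pad/truncate the batch to a shared length (1024 = BART-base encoder limit).
batch = tokenizer(articles, return_tensors="pt", padding=True, truncation=True, max_length=1024)

summary_ids = model.generate(
    input_ids=batch["input_ids"],
    attention_mask=batch["attention_mask"],
    length_penalty=2.0,
    max_length=142,
    min_length=56,
    num_beams=4,
)

for ids in summary_ids:
    print(tokenizer.decode(ids, skip_special_tokens=True))
```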
### API
You can experience this model through [ainize](https://ainize.ai/gkswjdzz/summarize-torchserve?branch=main).
|
{"language": "en", "license": "apache-2.0", "tags": ["summarization", "bart"], "datasets": ["cnn_dailymail"]}
|
summarization
|
ainize/bart-base-cnn
|
[
"transformers",
"pytorch",
"bart",
"feature-extraction",
"summarization",
"en",
"dataset:cnn_dailymail",
"license:apache-2.0",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #bart #feature-extraction #summarization #en #dataset-cnn_dailymail #license-apache-2.0 #endpoints_compatible #has_space #region-us
|
# BART base model fine-tuned on CNN Dailymail
- This model is a bart-base model fine-tuned on the CNN/Dailymail summarization dataset using Ainize Teachable-NLP.
The Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,
Bart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).
The pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.
BART is particularly effective when fine tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.
The Authors’ code can be found here:
URL
## Usage
### Python Code
### API
You can experience this model through ainize.
|
[
"# BART base model fine-tuned on CNN Dailymail\n\n- This model is a bart-base model fine-tuned on the CNN/Dailymail summarization dataset using Ainize Teachable-NLP.\n\nThe Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,\n\nBart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).\n\nThe pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.\n\nBART is particularly effective when fine tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.\n\nThe Authors’ code can be found here:\nURL",
"## Usage",
"### Python Code",
"### API\nYou can experience this model through ainize."
] |
[
"TAGS\n#transformers #pytorch #bart #feature-extraction #summarization #en #dataset-cnn_dailymail #license-apache-2.0 #endpoints_compatible #has_space #region-us \n",
"# BART base model fine-tuned on CNN Dailymail\n\n- This model is a bart-base model fine-tuned on the CNN/Dailymail summarization dataset using Ainize Teachable-NLP.\n\nThe Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,\n\nBart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).\n\nThe pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.\n\nBART is particularly effective when fine tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.\n\nThe Authors’ code can be found here:\nURL",
"## Usage",
"### Python Code",
"### API\nYou can experience this model through ainize."
] |
[
56,
287,
3,
4,
12
] |
[
"passage: TAGS\n#transformers #pytorch #bart #feature-extraction #summarization #en #dataset-cnn_dailymail #license-apache-2.0 #endpoints_compatible #has_space #region-us \n# BART base model fine-tuned on CNN Dailymail\n\n- This model is a bart-base model fine-tuned on the CNN/Dailymail summarization dataset using Ainize Teachable-NLP.\n\nThe Bart model was proposed by Mike Lewis, Yinhan Liu, Naman Goyal, Marjan Ghazvininejad, Abdelrahman Mohamed, Omer Levy, Ves Stoyanov and Luke Zettlemoyer on 29 Oct, 2019. According to the abstract,\n\nBart uses a standard seq2seq/machine translation architecture with a bidirectional encoder (like BERT) and a left-to-right decoder (like GPT).\n\nThe pretraining task involves randomly shuffling the order of the original sentences and a novel in-filling scheme, where spans of text are replaced with a single mask token.\n\nBART is particularly effective when fine tuned for text generation but also works well for comprehension tasks. It matches the performance of RoBERTa with comparable training resources on GLUE and SQuAD, achieves new state-of-the-art results on a range of abstractive dialogue, question answering, and summarization tasks, with gains of up to 6 ROUGE.\n\nThe Authors’ code can be found here:\nURL## Usage### Python Code### API\nYou can experience this model through ainize."
] |
[
-0.01728367805480957,
0.12727048993110657,
-0.005754969082772732,
0.015707015991210938,
0.03505747392773628,
0.007795763202011585,
0.08458833396434784,
0.05177730321884155,
-0.03414448723196983,
0.08060745149850845,
0.04171506687998772,
-0.012091260403394699,
0.07941151410341263,
0.1395728886127472,
0.04823509603738785,
-0.2250659018754959,
0.06677394360303879,
-0.02865234762430191,
-0.08740756660699844,
0.06660719215869904,
0.08149243891239166,
-0.014796214178204536,
0.01813821867108345,
0.011884834617376328,
0.03945257514715195,
0.01900484599173069,
-0.03988473862409592,
-0.05594294145703316,
0.0924658551812172,
0.07700609415769577,
0.08106400072574615,
0.06994011998176575,
0.034628886729478836,
-0.13861523568630219,
0.030744072049856186,
0.06912589073181152,
-0.022083904594182968,
0.04051526263356209,
0.05235883221030235,
-0.10216737538576126,
0.13715878129005432,
-0.06978663802146912,
-0.002963389502838254,
0.01664576679468155,
-0.12668552994728088,
0.016523990780115128,
-0.07816548645496368,
-0.016065914183855057,
0.0631742998957634,
0.012655537575483322,
-0.02406568080186844,
0.019694583490490913,
-0.035442933440208435,
0.09310732036828995,
0.13590717315673828,
-0.2865513265132904,
-0.03129291906952858,
0.10841585695743561,
-0.01330201793462038,
0.022693317383527756,
-0.024928970262408257,
0.020678985863924026,
0.024155596271157265,
-0.029025666415691376,
0.06544964015483856,
-0.036525383591651917,
0.002005009911954403,
0.032950062304735184,
-0.16909393668174744,
-0.03206508606672287,
0.10152945667505264,
-0.016900284215807915,
-0.05421043932437897,
-0.06368608772754669,
-0.06655889749526978,
0.07388422638177872,
-0.03058210201561451,
0.02051524631679058,
0.003599728923290968,
0.04604841396212578,
-0.01368170976638794,
-0.03762306272983551,
-0.08725330978631973,
-0.051741745322942734,
-0.0506659671664238,
0.1491250991821289,
0.06495650857686996,
0.00522206723690033,
-0.04408258572220802,
0.06660038232803345,
-0.047800932079553604,
-0.0950930267572403,
-0.07182003557682037,
-0.006568551529198885,
-0.169467031955719,
-0.0010340340668335557,
-0.08002524077892303,
-0.14508765935897827,
0.02004433423280716,
0.1258527785539627,
0.021020768210291862,
0.08694889396429062,
0.03601512685418129,
0.03716917708516121,
0.012781279161572456,
0.15691757202148438,
-0.005261742044240236,
-0.03674006462097168,
-0.002724765334278345,
0.044434595853090286,
-0.0203389972448349,
-0.0321589857339859,
-0.017415262758731842,
-0.00020722886256407946,
-0.03508207947015762,
0.015507749281823635,
0.0020279516465961933,
0.07472173869609833,
-0.03415916860103607,
0.017940301448106766,
-0.0018601552583277225,
-0.13513843715190887,
-0.013105975463986397,
0.024527907371520996,
-0.08099876344203949,
-0.08164877444505692,
0.013810845091938972,
-0.02162342146039009,
-0.08500391989946365,
0.10599889606237411,
-0.060352250933647156,
-0.026556722819805145,
-0.05041389912366867,
-0.08792334049940109,
-0.02313084527850151,
0.0037953821010887623,
-0.0774083212018013,
-0.08443544805049896,
-0.17234811186790466,
-0.04879441112279892,
0.06447985768318176,
-0.02082296647131443,
-0.01334223523736,
-0.08028192818164825,
0.02183643728494644,
0.008669357746839523,
0.012739424593746662,
0.020446443930268288,
-0.03974911570549011,
0.0628696084022522,
0.0002995897666551173,
0.05552949383854866,
0.043059080839157104,
0.013343670405447483,
-0.11428462713956833,
0.003932460211217403,
-0.0419074110686779,
0.1101197749376297,
-0.07624166458845139,
-0.058316729962825775,
-0.0751773938536644,
-0.008844107389450073,
-0.050482377409935,
0.04808324575424194,
0.02648870088160038,
0.1325128674507141,
-0.15946917235851288,
-0.09812631458044052,
0.19131635129451752,
-0.06061182916164398,
0.06121295318007469,
0.08429421484470367,
-0.049039699137210846,
0.017860060557723045,
0.09259118884801865,
0.14117801189422607,
0.11584997177124023,
-0.0274784117937088,
-0.02688525803387165,
0.02339135855436325,
-0.02520773932337761,
0.12853914499282837,
0.034925490617752075,
0.03886093199253082,
-0.0007481117499992251,
0.05044809728860855,
-0.07367787510156631,
0.04051172360777855,
0.0049145095981657505,
-0.044249486178159714,
0.02740776166319847,
0.0039056790992617607,
0.005682160146534443,
-0.06457674503326416,
0.07218869030475616,
0.007460721768438816,
-0.08526109904050827,
-0.06089020147919655,
0.0956864058971405,
-0.04571007564663887,
0.02345280349254608,
-0.07078350335359573,
-0.011625957675278187,
0.01403843704611063,
0.05525960028171539,
-0.13498945534229279,
-0.031749624758958817,
0.04235052317380905,
-0.04329867288470268,
0.09441982209682465,
0.015363998711109161,
0.04563317447900772,
0.06026729568839073,
-0.03038657456636429,
0.005054537672549486,
-0.011223793029785156,
-0.07672007381916046,
-0.010250157676637173,
-0.07677477598190308,
-0.006117038428783417,
-0.0529889352619648,
0.05396348610520363,
-0.09527627378702164,
0.020644601434469223,
0.005659729242324829,
0.11912520229816437,
-0.004620788618922234,
-0.04780648276209831,
0.010989068076014519,
0.024076953530311584,
-0.03747253865003586,
-0.018440812826156616,
0.01644105836749077,
0.05184997245669365,
-0.0907241702079773,
0.0941188856959343,
-0.15743759274482727,
-0.13435639441013336,
0.042439743876457214,
0.07748421281576157,
-0.059745654463768005,
0.013193130493164062,
-0.036071546375751495,
0.008544212207198143,
-0.048762302845716476,
0.006157274823635817,
0.14546968042850494,
0.015476711094379425,
0.14417771995067596,
-0.06965392827987671,
-0.048852477222681046,
-0.02297208085656166,
-0.006614334415644407,
-0.005699342116713524,
0.11193864047527313,
0.027574792504310608,
-0.08044234663248062,
0.02434256486594677,
-0.09907479584217072,
0.002200464252382517,
0.1596996784210205,
-0.023111777380108833,
-0.05724277347326279,
-0.008776452392339706,
0.05238228291273117,
0.027187373489141464,
0.02085964009165764,
0.09276852756738663,
-0.018418578431010246,
0.05200295150279999,
0.07137561589479446,
0.039390865713357925,
-0.11700071394443512,
0.04743175208568573,
0.022369379177689552,
-0.07534626126289368,
-0.05423029884696007,
0.012838575057685375,
-0.03014233708381653,
0.06983346492052078,
-0.003987350966781378,
-0.04321034997701645,
-0.04994172975420952,
-0.03017175942659378,
-0.06854555010795593,
0.123198002576828,
-0.06682682782411575,
-0.25501856207847595,
-0.18192222714424133,
0.02040349692106247,
-0.041991110891103745,
0.03928350657224655,
0.03858422115445137,
-0.06616190820932388,
-0.05954654514789581,
-0.06939907371997833,
0.09963857382535934,
-0.0885433703660965,
-0.019794637337327003,
-0.020011361688375473,
-0.008761794306337833,
-0.02446836233139038,
-0.17279179394245148,
0.019798848778009415,
-0.039694491773843765,
-0.05515136197209358,
0.05139240622520447,
-0.04579079523682594,
0.03813102841377258,
0.07046248763799667,
0.04063345119357109,
0.007929889485239983,
-0.05858585238456726,
0.205149844288826,
-0.06888940185308456,
0.0560271218419075,
0.04164321348071098,
0.005309815518558025,
0.067909836769104,
0.057842668145895004,
-0.03172415494918823,
-0.06959224492311478,
0.05290951207280159,
0.05199982225894928,
-0.08365797996520996,
-0.1685183346271515,
-0.06095404550433159,
-0.0994042456150055,
0.037776172161102295,
0.07106627523899078,
0.06425203382968903,
-0.05225581303238869,
0.010877964086830616,
-0.10431209206581116,
0.04727941006422043,
0.11169777810573578,
0.09198273718357086,
0.012711165472865105,
0.035639770328998566,
0.08462047576904297,
-0.05089721828699112,
-0.08507411181926727,
0.07139420509338379,
0.007438619155436754,
0.23985156416893005,
-0.04573921486735344,
0.19505637884140015,
0.022851839661598206,
0.03743362054228783,
0.07750044018030167,
0.055587038397789,
-0.04496302083134651,
0.04866018891334534,
-0.06292280554771423,
-0.04788242653012276,
-0.03393501788377762,
0.07541316002607346,
0.016660958528518677,
-0.10134586691856384,
0.005304647609591484,
0.029562707990407944,
0.05195461958646774,
0.22590607404708862,
0.003382057650014758,
-0.1363772451877594,
-0.029247356578707695,
0.03438764438033104,
-0.042326584458351135,
-0.009470820426940918,
0.01073269173502922,
0.02835712395608425,
-0.05583718791604042,
0.07391006499528885,
-0.008172815665602684,
0.0973806232213974,
-0.03297516331076622,
-0.008350759744644165,
-0.11926113814115524,
0.030092088505625725,
0.004032351076602936,
0.08830294758081436,
-0.233682319521904,
0.1519647091627121,
0.00550874276086688,
0.09052371978759766,
-0.015610234811902046,
0.031130537390708923,
0.03526538237929344,
-0.07009562849998474,
0.07659922540187836,
0.025709783658385277,
-0.08999063074588776,
-0.042283788323402405,
-0.05227620527148247,
0.03338403254747391,
0.09831494092941284,
-0.0741436704993248,
0.10470305383205414,
-0.036145493388175964,
-0.005685773678123951,
-0.023443326354026794,
0.06418427079916,
-0.03285567834973335,
-0.1824357807636261,
0.05607534945011139,
-0.0015843035653233528,
0.035049472004175186,
-0.0540488101541996,
-0.0466262549161911,
-0.085150346159935,
0.15636983513832092,
-0.1212274432182312,
-0.051193419843912125,
-0.06748703122138977,
0.0161538477987051,
0.06175199896097183,
-0.05527272820472717,
-0.05863029137253761,
-0.002355928998440504,
0.09079927206039429,
-0.07758446037769318,
-0.11469689011573792,
-0.018615417182445526,
-0.01963673159480095,
-0.09347591549158096,
-0.06019323319196701,
0.12849584221839905,
0.07172936201095581,
0.059996262192726135,
-0.01405817735940218,
0.07534240186214447,
-0.021875455975532532,
-0.07886303216218948,
-0.0007713710074312985,
0.00040374798118136823,
0.014497448690235615,
0.08042043447494507,
-0.04720422998070717,
0.0041563911363482475,
-0.07945185899734497,
-0.006365599110722542,
0.05523982271552086,
0.21875236928462982,
-0.028155282139778137,
0.15362538397312164,
0.1104586273431778,
-0.0953252911567688,
-0.15438209474086761,
-0.11289303749799728,
0.049388252198696136,
0.06443318724632263,
-0.057120274752378464,
-0.23519404232501984,
-0.03797980397939682,
0.019680870696902275,
-0.012386485002934933,
-0.06919313222169876,
-0.2305665910243988,
-0.10275162011384964,
0.05643519386649132,
-0.021202366799116135,
0.26185116171836853,
-0.09807291626930237,
-0.035104114562273026,
-0.009267167188227177,
-0.10139701515436172,
0.12035688757896423,
-0.01904495619237423,
0.13406167924404144,
0.021218622103333473,
-0.0070863463915884495,
0.016627492383122444,
-0.027034884318709373,
0.12077676504850388,
0.03276878222823143,
0.02865123748779297,
-0.02049611322581768,
0.043257612735033035,
0.057892899960279465,
-0.029447633773088455,
0.049236398190259933,
-0.08124897629022598,
0.043967124074697495,
-0.07526812702417374,
-0.07015244662761688,
-0.022711370140314102,
0.05780012160539627,
-0.014819732867181301,
-0.029901141300797462,
-0.09675942361354828,
0.07103606313467026,
0.08877561241388321,
0.022991996258497238,
0.10887587070465088,
-0.009410878643393517,
0.007928508333861828,
0.046797361224889755,
-0.00305194198153913,
-0.01259858813136816,
-0.07739683240652084,
-0.0316275991499424,
0.02646118588745594,
0.08726266026496887,
-0.07062746584415436,
0.029401147738099098,
0.12506499886512756,
0.03156286105513573,
0.13431105017662048,
0.05555364117026329,
-0.11273615807294846,
0.030640458688139915,
0.1009460836648941,
-0.13363555073738098,
-0.16648028790950775,
0.015101117081940174,
-0.12704409658908844,
0.011765378527343273,
-0.033655960112810135,
0.17943094670772552,
-0.01087531354278326,
-0.016058487817645073,
0.039601005613803864,
0.027317728847265244,
0.0070138657465577126,
0.13114367425441742,
-0.04404802247881889,
0.03830830752849579,
-0.07172153145074844,
0.08618580549955368,
0.12927143275737762,
-0.026525016874074936,
0.0022001073230057955,
0.09980810433626175,
-0.11318482458591461,
0.0014248505467548966,
-0.07027403265237808,
0.0732894167304039,
0.01773836277425289,
-0.020823271945118904,
-0.13542084395885468,
-0.03731002286076546,
0.053517360240221024,
0.07189575582742691,
0.03341696783900261,
0.06792927533388138,
-0.05633850023150444,
0.05694376304745674,
-0.002078590216115117,
0.0032544508576393127,
0.11002619564533234,
0.02316507324576378,
-0.03880744427442551,
0.14935462176799774,
-0.034297194331884384,
0.029456114396452904,
-0.03079484961926937,
-0.08313997089862823,
-0.10901259630918503,
-0.000015118350347620435,
-0.06601829081773758,
0.06391928344964981,
-0.09288561344146729,
-0.015397468581795692,
-0.0224477406591177,
-0.03587581589818001,
-0.01909027248620987,
0.01987067237496376,
-0.037766825407743454,
-0.019736962392926216,
-0.10048424452543259,
0.0634719654917717,
-0.08267826586961746,
0.013544523157179356,
0.07853078097105026,
-0.08958273380994797,
0.04246210306882858,
0.030246417969465256,
-0.05818290263414383,
0.09297901391983032,
-0.04690137505531311,
-0.02990438975393772,
-0.017523368820548058,
0.04938530549407005,
-0.04241099953651428,
-0.05470104515552521,
0.03512857109308243,
-0.013475758023560047,
0.015824727714061737,
-0.011899134144186974,
0.0410647913813591,
-0.10217344760894775,
-0.012428795918822289,
-0.03186637535691261,
-0.016887500882148743,
-0.0655127465724945,
-0.03050835058093071,
-0.0037230157759040594,
0.1335294395685196,
0.10897721350193024,
-0.05501484125852585,
-0.015133773908019066,
-0.11805332452058792,
0.0029324195347726345,
0.029450630769133568,
-0.08592924475669861,
-0.008362430147826672,
-0.07880993187427521,
0.05285993590950966,
-0.00034070308902300894,
0.0974482074379921,
-0.010215558111667633,
-0.10243731737136841,
0.018574414774775505,
-0.03067086450755596,
0.018752122297883034,
0.003265627194195986,
0.06754350662231445,
0.10649778693914413,
-0.030015287920832634,
-0.02141626924276352,
-0.005069275852292776,
0.02713501825928688,
0.05150385946035385,
0.13691815733909607,
0.08868493884801865,
0.1719880849123001,
0.0756981298327446,
-0.024711886420845985,
-0.06259733438491821,
-0.008363505825400352,
0.029467616230249405,
-0.06345529109239578,
0.04046470299363136,
0.025871362537145615,
0.07049276679754257,
0.16808678209781647,
-0.11186747997999191,
0.13223212957382202,
-0.00042430488974787295,
-0.0501873753964901,
-0.05311138555407524,
-0.15979433059692383,
-0.052363455295562744,
-0.0350269116461277,
0.03297990560531616,
-0.10675275325775146,
0.04060439392924309,
0.05450822040438652,
0.03180541470646858,
0.034714873880147934,
-0.011688126251101494,
-0.05984067916870117,
-0.0991930216550827,
0.0538938008248806,
-0.04899729788303375,
0.03073924034833908,
0.05872424319386482,
-0.05881005898118019,
0.022203152999281883,
0.0495016947388649,
0.06636442244052887,
0.048411495983600616,
0.10178414732217789,
0.028770655393600464,
-0.06292697787284851,
-0.03054555132985115,
-0.005375792738050222,
0.0077917384915053844,
0.04061627760529518,
0.10158167034387589,
0.05535142496228218,
-0.0372125543653965,
0.031001893803477287,
0.27220842242240906,
-0.0026391937863081694,
-0.122220478951931,
-0.16763123869895935,
0.08723577111959457,
0.07512269914150238,
0.036250192672014236,
0.03906198963522911,
-0.10738954693078995,
-0.10007722675800323,
0.1921667605638504,
0.12272431701421738,
0.005238048732280731,
-0.0075447214767336845,
-0.011984605342149734,
0.019411327317357063,
0.09182393550872803,
0.04216127097606659,
0.05626467987895012,
0.332763135433197,
-0.04227181151509285,
-0.0054012686014175415,
-0.024797946214675903,
-0.007777821738272905,
-0.07202669233083725,
0.17068111896514893,
-0.023911772295832634,
-0.015863383188843727,
-0.07581491023302078,
0.019790662452578545,
-0.07352032512426376,
-0.24455539882183075,
-0.09958235919475555,
-0.055774983018636703,
-0.09919962286949158,
0.0017255409620702267,
0.009408364072442055,
0.011287631466984749,
0.08683465421199799,
0.025693083181977272,
-0.013984685763716698,
-0.020338987931609154,
0.00672009726986289,
-0.02916392683982849,
-0.04669852927327156,
0.07578574120998383,
-0.08933991938829422,
0.07689326256513596,
0.005461354739964008,
0.09806413948535919,
0.06898503750562668,
0.03762264922261238,
-0.02241450734436512,
0.07592251151800156,
0.011944589205086231,
-0.03978486359119415,
0.01109845656901598,
0.10564303398132324,
-0.02488333359360695,
0.03237161412835121,
0.06012110412120819,
-0.043059565126895905,
0.045048218220472336,
0.07020384818315506,
-0.027624428272247314,
-0.08754397183656693,
0.03042524866759777,
-0.06391318142414093,
0.05843941122293472,
0.16509224474430084,
-0.0072419531643390656,
0.02878633327782154,
-0.0451200045645237,
0.03388191759586334,
-0.028617456555366516,
0.04256436228752136,
-0.018426472321152687,
-0.17820385098457336,
-0.04638737812638283,
-0.04351234436035156,
0.027269208803772926,
-0.2080155611038208,
-0.0610886812210083,
-0.06244990602135658,
-0.0036827493458986282,
0.03287326171994209,
0.13698618113994598,
0.0806250274181366,
0.0009346468723379076,
-0.038096554577350616,
-0.18986105918884277,
0.021789085119962692,
0.03917866572737694,
-0.09642075002193451,
-0.09563401341438293
] |
null | null |
transformers
|
Original repository: <https://huggingface.co/EleutherAI/gpt-j-6B>
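The original card stops at the link above, so the snippet below is only a minimal loading sketch, assuming the standard `transformers` auto classes; nothing in it is prescribed by the card itself. Loading with `torch_dtype=torch.float16` keeps the weights in half precision, which is the point of this repackaged checkpoint.

```python
# Minimal sketch (assumption, not from the original card): load the float16
# GPT-J checkpoint and run a short generation as a smoke test.
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("ainize/gpt-j-6B-float16")
model = AutoModelForCausalLM.from_pretrained(
    "ainize/gpt-j-6B-float16",
    torch_dtype=torch.float16,  # keep the weights in half precision
)

inputs = tokenizer("Once upon a time", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=30)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```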
|
{"license": "apache-2.0"}
|
feature-extraction
|
ainize/gpt-j-6B-float16
|
[
"transformers",
"pytorch",
"gptj",
"feature-extraction",
"license:apache-2.0",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gptj #feature-extraction #license-apache-2.0 #endpoints_compatible #has_space #region-us
|
Original repository : <URL
|
[] |
[
"TAGS\n#transformers #pytorch #gptj #feature-extraction #license-apache-2.0 #endpoints_compatible #has_space #region-us \n"
] |
[
43
] |
[
"passage: TAGS\n#transformers #pytorch #gptj #feature-extraction #license-apache-2.0 #endpoints_compatible #has_space #region-us \n"
] |
[
-0.019439825788140297,
0.0986516997218132,
-0.004801807459443808,
0.02175869420170784,
0.027963092550635338,
0.01574639230966568,
0.01916668191552162,
0.17641215026378632,
-0.027291765436530113,
-0.021829940378665924,
0.14229167997837067,
0.19018436968326569,
-0.016704445704817772,
0.04481127858161926,
-0.0018889732891693711,
-0.20110805332660675,
0.135574609041214,
0.06672656536102295,
-0.1139005497097969,
0.07898275554180145,
0.10461928695440292,
-0.034139540046453476,
0.03996124863624573,
0.030799655243754387,
-0.08969509601593018,
0.007869230583310127,
-0.0030008249450474977,
-0.08574727177619934,
0.09258241206407547,
0.033258769661188126,
0.02925013191998005,
0.04034041240811348,
-0.07681526988744736,
-0.13510523736476898,
0.016661129891872406,
0.04209522902965546,
-0.06998889148235321,
0.055685438215732574,
0.01404917985200882,
-0.01874551549553871,
0.06112971529364586,
-0.002435169415548444,
-0.03944433853030205,
0.009107564575970173,
-0.09758937358856201,
-0.3526577353477478,
-0.11672261357307434,
0.11498265713453293,
-0.029703756794333458,
0.07926484197378159,
0.0406496524810791,
0.15417832136154175,
-0.15264403820037842,
0.013094411231577396,
0.22845257818698883,
-0.3429918587207794,
0.015313324518501759,
0.1309099644422531,
0.09624426066875458,
-0.044949695467948914,
0.016290243715047836,
0.040763434022665024,
0.033411212265491486,
0.02017221413552761,
0.0698489174246788,
-0.041657499969005585,
-0.10163521021604538,
0.08712126314640045,
-0.08047313243150711,
-0.14084455370903015,
0.2918797731399536,
0.008616090752184391,
0.06386911869049072,
-0.01169411651790142,
-0.07036110758781433,
0.04748548939824104,
-0.0029177742544561625,
0.05709906667470932,
0.042231131345033646,
0.11688534915447235,
0.0540812686085701,
-0.06495404243469238,
-0.15116682648658752,
-0.005788368172943592,
-0.17358727753162384,
0.09107853472232819,
0.03605877235531807,
0.11090516299009323,
-0.10739169269800186,
0.08760353177785873,
-0.07997726649045944,
-0.09990515559911728,
-0.03650195151567459,
-0.07693523168563843,
0.1424417942762375,
0.06287854164838791,
-0.08469708263874054,
0.07962146401405334,
0.13526755571365356,
0.20286235213279724,
-0.027792224660515785,
-0.055168841034173965,
0.032912980765104294,
0.13552944362163544,
0.007277755066752434,
0.09189847856760025,
-0.07634762674570084,
0.027385611087083817,
0.09448213130235672,
-0.12078516185283661,
0.04550017789006233,
-0.02577289380133152,
-0.11009690910577774,
-0.0667402520775795,
-0.0007280976860783994,
0.09465273469686508,
0.11892952769994736,
0.03903399035334587,
-0.027031272649765015,
0.03224259614944458,
0.17929939925670624,
-0.03904787451028824,
0.016680849716067314,
-0.03953475505113602,
0.028731172904372215,
0.0517529733479023,
0.06856248527765274,
-0.003450678428635001,
-0.01556185819208622,
0.003259938443079591,
-0.06579697132110596,
0.000995337381027639,
-0.04660030081868172,
-0.02405078150331974,
0.10320714861154556,
-0.08889204263687134,
0.04811524972319603,
-0.15467892587184906,
-0.12253875285387039,
0.03980782628059387,
0.0706200823187828,
0.007204459514468908,
-0.07845897972583771,
0.1109885647892952,
-0.06935778260231018,
0.037645742297172546,
-0.08863331377506256,
0.01213799137622118,
-0.09811343252658844,
0.06346189975738525,
-0.09609775990247726,
0.040915895253419876,
-0.17503036558628082,
0.06863546371459961,
-0.12151569873094559,
0.03723234310746193,
-0.05389462783932686,
-0.03432502970099449,
-0.08917307108640671,
0.13686859607696533,
-0.050546564161777496,
-0.05462932586669922,
-0.030469002202153206,
0.009337611496448517,
-0.03437061980366707,
0.06507351249456406,
-0.09521947801113129,
-0.0375433973968029,
0.20780940353870392,
-0.1039067953824997,
-0.1683838814496994,
0.04313752055168152,
0.032533131539821625,
0.005013080779463053,
-0.006550103425979614,
0.20696955919265747,
0.05690973252058029,
-0.028545252978801727,
-0.008736376650631428,
0.15910282731056213,
-0.10095228999853134,
-0.16564300656318665,
0.0855158120393753,
-0.05806855112314224,
-0.014277254231274128,
0.0479339100420475,
-0.087301105260849,
0.11285844445228577,
0.018103910610079765,
-0.06789179891347885,
-0.11162707209587097,
-0.036381639540195465,
0.012630684301257133,
0.005510540679097176,
0.061321698129177094,
-0.032199639827013016,
-0.053626272827386856,
-0.016665872186422348,
0.06077835336327553,
0.030315855517983437,
0.07695850729942322,
-0.04111986607313156,
0.14482754468917847,
-0.06585671752691269,
0.03027230314910412,
-0.11035221070051193,
0.005174559541046619,
-0.00514648761600256,
-0.07973936945199966,
-0.026237381622195244,
0.18168218433856964,
0.05373256653547287,
-0.10258043557405472,
0.028894690796732903,
-0.0072534834034740925,
0.03464585542678833,
0.06677906215190887,
0.010204795747995377,
-0.09294482320547104,
-0.00771938543766737,
-0.03247392550110817,
-0.030839232727885246,
0.09318673610687256,
0.029695546254515648,
0.09700649231672287,
0.0888235792517662,
-0.06939245015382767,
0.06965086609125137,
0.0006409267662093043,
-0.03478167578577995,
-0.036544814705848694,
-0.048313405364751816,
0.07216198742389679,
0.043089400976896286,
-0.024649914354085922,
0.2403455227613449,
-0.03321832790970802,
0.2660619020462036,
0.21696870028972626,
-0.1357911229133606,
0.08734118938446045,
0.07620842009782791,
-0.06206252798438072,
0.015597967430949211,
0.04096081480383873,
-0.007422634866088629,
-0.03120102733373642,
-0.009389015845954418,
0.08486513048410416,
-0.055095575749874115,
-0.053357064723968506,
-0.004986490122973919,
-0.07018166780471802,
-0.007362316362559795,
-0.011338124051690102,
0.12432137131690979,
-0.10178026556968689,
0.20200976729393005,
0.38087764382362366,
-0.018912944942712784,
0.07406695187091827,
-0.07945618033409119,
-0.01523701474070549,
0.00242452509701252,
-0.016843387857079506,
-0.07376367598772049,
0.09624896943569183,
-0.20147448778152466,
-0.0027907490730285645,
0.1191166415810585,
0.061104316264390945,
0.0792897418141365,
-0.1522602140903473,
-0.05528312176465988,
0.03728907182812691,
-0.017741940915584564,
-0.07663299888372421,
0.10065062344074249,
-0.0032198303379118443,
0.053508102893829346,
0.00930533092468977,
-0.028169596567749977,
0.10462702065706253,
0.037916865199804306,
-0.012006416916847229,
0.1107882410287857,
-0.15628407895565033,
-0.23326264321804047,
-0.06127910315990448,
-0.018203100189566612,
-0.012995082885026932,
0.0013111905427649617,
0.15532393753528595,
-0.039115164428949356,
-0.04711780697107315,
0.0040540192276239395,
-0.0412466898560524,
-0.05282727628946304,
0.01896878331899643,
-0.0472334660589695,
0.03268712759017944,
0.0016525730025023222,
-0.14152683317661285,
-0.0663471445441246,
0.029244285076856613,
-0.07560195028781891,
0.06657842546701431,
-0.008137409575283527,
0.08320551365613937,
0.09276188164949417,
0.038954369723796844,
0.03361355885863304,
-0.01384796854108572,
0.17468790709972382,
-0.03042542189359665,
-0.009382289834320545,
0.24709583818912506,
0.05679060146212578,
0.06809939444065094,
0.08814135938882828,
0.030771255493164062,
-0.014041854999959469,
-0.04239881411194801,
-0.05040683597326279,
-0.10350479185581207,
-0.17693239450454712,
-0.10061285644769669,
-0.13049736618995667,
0.021016495302319527,
0.018552016466856003,
0.10411867499351501,
0.11471779644489288,
0.09068580716848373,
-0.02003697119653225,
0.005774817429482937,
-0.05582801252603531,
0.03831888362765312,
0.20991376042366028,
-0.06647765636444092,
0.08867336809635162,
-0.1230664998292923,
-0.026541732251644135,
0.12398646026849747,
0.12020599842071533,
0.18150240182876587,
0.05613958090543747,
0.014459238387644291,
0.1339072734117508,
0.28135231137275696,
0.054575350135564804,
0.08540478348731995,
-0.002749249106273055,
-0.008058271370828152,
-0.05170174688100815,
-0.022003669291734695,
-0.033210135996341705,
0.08503254503011703,
0.07166524976491928,
-0.1492057591676712,
0.006947541609406471,
-0.23386472463607788,
0.10317662358283997,
0.1659191995859146,
0.030294213443994522,
-0.09824959933757782,
0.007254767697304487,
0.08178047090768814,
0.014198102988302708,
-0.02186625823378563,
0.06785072386264801,
-0.019722748547792435,
-0.09029606729745865,
0.048483096063137054,
0.009143399074673653,
0.08351538330316544,
0.024701762944459915,
0.03387979045510292,
-0.014568110927939415,
-0.15669313073158264,
0.07855595648288727,
0.14088191092014313,
-0.23494090139865875,
0.1863626390695572,
-0.03564976528286934,
-0.056716229766607285,
-0.0915718823671341,
0.034811921417713165,
0.05972853675484657,
0.17433291673660278,
0.0884561538696289,
0.06044498458504677,
-0.10208180546760559,
-0.006779542658478022,
-0.02473306469619274,
0.04188448190689087,
-0.025999819859862328,
-0.01681969314813614,
-0.05214598774909973,
-0.04641738161444664,
0.013092882931232452,
0.0035330024547874928,
0.2158324420452118,
0.007382094860076904,
-0.13258543610572815,
0.07453335076570511,
0.0628424733877182,
0.01753632351756096,
-0.09099498391151428,
0.01427454873919487,
-0.12716998159885406,
0.17185592651367188,
-0.03335581347346306,
-0.07453563064336777,
-0.07645758986473083,
-0.11837498843669891,
0.10811065882444382,
-0.045198604464530945,
0.07813092321157455,
-0.07181049883365631,
-0.02091808058321476,
-0.06455972045660019,
-0.17865489423274994,
0.0899621993303299,
-0.1284242570400238,
-0.013862132094800472,
-0.005795770324766636,
0.09619850665330887,
-0.14461083710193634,
0.029925011098384857,
0.04420607164502144,
0.03292619809508324,
-0.17233841121196747,
-0.14639951288700104,
-0.002301414031535387,
0.03874227777123451,
0.047226861119270325,
-0.03285001963376999,
-0.006271667778491974,
0.035574544221162796,
0.09878994524478912,
-0.013199270702898502,
0.1978096216917038,
0.1530098021030426,
-0.12014928460121155,
0.15743488073349,
0.09533654898405075,
-0.031650952994823456,
-0.27899086475372314,
-0.1468854695558548,
-0.1811368614435196,
-0.10245014727115631,
0.0031970408745110035,
-0.10047556459903717,
0.09477370977401733,
0.08515132963657379,
-0.10730106383562088,
0.07706386595964432,
-0.23644930124282837,
-0.04543456807732582,
0.12514707446098328,
-0.0639478862285614,
0.2951014041900635,
-0.14726345241069794,
-0.04472167789936066,
-0.015962637960910797,
-0.2914557456970215,
0.1330706626176834,
-0.13728490471839905,
0.06715521216392517,
-0.0316426157951355,
0.012315005995333195,
-0.015692221000790596,
-0.08030983060598373,
0.15814490616321564,
-0.019957732409238815,
0.025422532111406326,
-0.1023307740688324,
0.019776618108153343,
0.15788966417312622,
-0.04804897680878639,
0.06765534728765488,
-0.10699237883090973,
0.027363939210772514,
-0.11677680909633636,
0.019357187673449516,
-0.13867780566215515,
0.10273776948451996,
0.010669637471437454,
-0.060217343270778656,
-0.08900222182273865,
-0.013053154572844505,
0.03950547054409981,
0.012668834999203682,
0.22416307032108307,
0.048147015273571014,
0.048677537590265274,
0.05460085719823837,
-0.08199090510606766,
-0.23870159685611725,
-0.1526230126619339,
-0.03992944210767746,
-0.06095949187874794,
0.08616102486848831,
-0.24392227828502655,
0.051613710820674896,
0.04470206797122955,
-0.042895711958408356,
0.025069959461688995,
0.08499255776405334,
-0.0356689915060997,
-0.022161975502967834,
0.13025546073913574,
-0.15390267968177795,
-0.02354789339005947,
0.00901632010936737,
0.06460388004779816,
0.10137715935707092,
0.05191129446029663,
0.10209579020738602,
0.0026624994352459908,
-0.04240325838327408,
0.013489790260791779,
0.0285252146422863,
-0.14315567910671234,
0.009844664484262466,
0.04880892112851143,
0.010019734501838684,
-0.13249969482421875,
0.0651996061205864,
0.0069326115772128105,
-0.13099992275238037,
-0.02827589027583599,
0.013166597113013268,
-0.09365952014923096,
-0.15107938647270203,
-0.033212918788194656,
-0.039957694709300995,
-0.1269271969795227,
-0.0869130864739418,
0.0013284074375405908,
-0.09896011650562286,
0.047397758811712265,
0.0042173368856310844,
0.11479424685239792,
0.10711142420768738,
0.025684067979454994,
-0.037631403654813766,
0.02953939326107502,
-0.06058642268180847,
-0.07450815290212631,
0.032483674585819244,
-0.08940589427947998,
-0.02746846340596676,
0.015157867223024368,
0.12081905454397202,
-0.03816244378685951,
0.017597496509552002,
-0.08077965676784515,
0.010340312495827675,
-0.09801722317934036,
-0.0674065425992012,
-0.15160199999809265,
-0.04381842166185379,
0.03691491857171059,
-0.08007994294166565,
-0.025356411933898926,
0.04596741497516632,
-0.1445522904396057,
-0.04161571338772774,
-0.035237256437540054,
0.06357266753911972,
-0.11498838663101196,
-0.043420955538749695,
0.09340724349021912,
-0.015200987458229065,
0.09867311269044876,
0.08322402089834213,
-0.052633631974458694,
0.07648342102766037,
-0.051817357540130615,
-0.1429324746131897,
0.08011433482170105,
0.028385816141963005,
0.020025281235575676,
-0.007340346463024616,
0.005886963102966547,
0.08738887310028076,
-0.04011199250817299,
0.010882006958127022,
-0.05561092123389244,
-0.13219434022903442,
-0.06842681020498276,
-0.023379530757665634,
-0.0771775096654892,
0.019257904961705208,
-0.09998518228530884,
0.1571153849363327,
0.045161399990320206,
0.1159067377448082,
0.053613994270563126,
0.010586682707071304,
-0.059576548635959625,
0.0043113138526678085,
-0.05014164000749588,
-0.15921439230442047,
-0.06002034991979599,
-0.032579854130744934,
-0.04524431750178337,
-0.021432561799883842,
0.32098978757858276,
0.05905439332127571,
-0.11223573237657547,
0.02925143390893936,
0.0740198865532875,
0.06502670794725418,
-0.012195857241749763,
0.2517085373401642,
0.04467334970831871,
-0.00476433290168643,
-0.07843443751335144,
0.057110950350761414,
0.0037241545505821705,
-0.14141005277633667,
0.027340438216924667,
0.11170431971549988,
0.15122488141059875,
0.07365740835666656,
0.03967125341296196,
-0.044917453080415726,
-0.1399686485528946,
-0.12439750880002975,
0.05562012642621994,
0.12494135648012161,
-0.034537333995103836,
0.05651009827852249,
0.12919513881206512,
-0.05833446606993675,
0.06123480945825577,
-0.01665452867746353,
0.0387239046394825,
-0.113587886095047,
-0.11075152456760406,
-0.030851833522319794,
-0.18520383536815643,
-0.010537729598581791,
-0.04852696508169174,
0.04181815683841705,
0.17061154544353485,
0.009464059956371784,
-0.0337398424744606,
0.009107986465096474,
0.013996277935802937,
-0.04770350083708763,
-0.0014191606314852834,
-0.011270464397966862,
-0.03037455305457115,
-0.02696133404970169,
-0.014147569425404072,
-0.06443806737661362,
-0.04015347734093666,
-0.018349140882492065,
0.03745797276496887,
-0.030690496787428856,
0.03055677004158497,
-0.10527601838111877,
-0.05116289108991623,
-0.08222679048776627,
0.03250877559185028,
-0.016297508031129837,
0.15550237894058228,
0.004248938988894224,
0.02286018803715706,
0.07061862200498581,
0.18417419493198395,
-0.08060569316148758,
-0.12609553337097168,
-0.05441905930638313,
0.08451194316148758,
0.057865872979164124,
0.030245620757341385,
0.009578077122569084,
0.01930304989218712,
-0.04756021499633789,
0.23555973172187805,
0.2633891701698303,
0.0015449996571987867,
0.05979280173778534,
0.023417621850967407,
0.0030172283295542,
0.025152109563350677,
0.07889268547296524,
0.15068119764328003,
0.18666362762451172,
-0.10408277809619904,
-0.03599991649389267,
-0.046604692935943604,
0.004895911086350679,
-0.15087604522705078,
0.018765343353152275,
-0.017137430608272552,
-0.12086733430624008,
-0.0005143979215063155,
0.04558375105261803,
-0.06767912954092026,
0.10264749079942703,
0.061890531331300735,
-0.14255133271217346,
-0.03002885729074478,
0.009123294614255428,
0.19294849038124084,
0.007555350195616484,
0.07033183425664902,
-0.04798978567123413,
-0.04366201534867287,
0.13612253963947296,
-0.006375066004693508,
-0.2216489315032959,
-0.06601469218730927,
0.12536406517028809,
0.020587068051099777,
0.13275495171546936,
-0.01801413483917713,
0.011672384105622768,
0.09265510737895966,
0.06931949406862259,
-0.1283247172832489,
0.07325425744056702,
0.022186975926160812,
-0.10443834215402603,
-0.0742209404706955,
-0.12161864340305328,
-0.02489137463271618,
-0.11559770256280899,
0.04344979673624039,
-0.10907921940088272,
0.03405870869755745,
0.05528273060917854,
0.007846124470233917,
-0.057355981320142746,
-0.04916253313422203,
-0.06403056532144547,
0.0596146285533905,
-0.0027744185645133257,
-0.03343478590250015,
-0.07259248942136765,
-0.07830612361431122,
0.009486345574259758,
0.043193452060222626,
-0.12640920281410217,
-0.12600229680538177,
0.05243716388940811,
-0.007175988517701626,
0.03226586431264877,
0.010935061611235142,
0.04907636344432831,
-0.05810684710741043,
-0.02024405635893345,
0.004996659234166145,
-0.08354802429676056,
0.031589288264513016,
0.06555220484733582,
0.00429330812767148,
0.010637813247740269,
-0.04704902693629265,
-0.0031752553768455982,
0.018840443342924118,
-0.08415421098470688,
-0.067699134349823
] |
null | null |
transformers
|
### Model information
Fine-tuning data 1: https://www.kaggle.com/andradaolteanu/rickmorty-scripts
Base model: e-tony/gpt2-rnm
Epoch: 1
Train runtime: 3.4982 secs
Loss: 3.0894
Training notebook: [Colab](https://colab.research.google.com/drive/1RawVxulLETFicWMY0YANUdP-H-e7Eeyc)
### ===Teachable NLP=== ###
Training a GPT-2 model normally requires writing code and GPU resources, but here you can easily fine-tune a model and get an API to use it for free.
Teachable NLP: [Teachable NLP](https://ainize.ai/teachable-nlp)
Tutorial: [Tutorial](https://forum.ainetwork.ai/t/teachable-nlp-how-to-use-teachable-nlp/65?utm_source=community&utm_medium=huggingface&utm_campaign=model&utm_content=teachable%20nlp)
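The card ships no usage code; the block below is a minimal, hedged sketch of querying the fine-tuned checkpoint locally with the standard `transformers` text-generation pipeline. Only the model id comes from this card; the prompt and sampling settings are illustrative.

```python
# Minimal sketch (not from the original card): generate Rick-style dialogue
# with the fine-tuned GPT-2 checkpoint via the text-generation pipeline.
from transformers import pipeline

generator = pipeline("text-generation", model="ainize/gpt2-rnm-with-only-rick")

result = generator("Rick: Morty, listen,", max_length=60, do_sample=True)
print(result[0]["generated_text"])
```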
|
{}
|
text-generation
|
ainize/gpt2-rnm-with-only-rick
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
### Model information
Fine tuning data 1: URL
Base model: e-tony/gpt2-rnm
Epoch: 1
Train runtime: 3.4982 secs
Loss: 3.0894
Training notebook: Colab
### ===Teachable NLP=== ###
Training a GPT-2 model normally requires writing code and GPU resources, but here you can easily fine-tune a model and get an API to use it for free.
Teachable NLP: Teachable NLP
Tutorial: Tutorial
|
[
"### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 1\n Train runtime: 3.4982 secs\n Loss: 3.0894\n\n\nTraining notebook: Colab",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
"TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 1\n Train runtime: 3.4982 secs\n Loss: 3.0894\n\n\nTraining notebook: Colab",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
50,
48,
57
] |
[
"passage: TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 1\n Train runtime: 3.4982 secs\n Loss: 3.0894\n\n\nTraining notebook: Colab### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
-0.12563487887382507,
0.21889199316501617,
-0.0010620722314342856,
0.12395568937063217,
0.10471464693546295,
0.05623902380466461,
0.09183236956596375,
0.12939651310443878,
0.0014779592165723443,
-0.06210627779364586,
0.1288062036037445,
0.1617022454738617,
0.00819578766822815,
0.1606428623199463,
0.015814824029803276,
-0.30020517110824585,
-0.01660892553627491,
0.059668879956007004,
0.02345879375934601,
0.10277337580919266,
0.08664008229970932,
-0.02826336957514286,
0.06259487569332123,
-0.0014675214188173413,
-0.10885100811719894,
-0.02114686742424965,
-0.002404489554464817,
-0.08413239568471909,
0.12762318551540375,
0.052812907844781876,
0.04021449759602547,
0.010976260527968407,
0.06968823820352554,
-0.1807827204465866,
0.030514094978570938,
0.002785667311400175,
-0.04686238616704941,
0.10301714390516281,
0.02329012006521225,
0.008915740065276623,
0.2252814620733261,
0.024910351261496544,
0.012130915187299252,
0.044967710971832275,
-0.09854009747505188,
-0.09151196479797363,
-0.028614355251193047,
0.10520768165588379,
0.12396670132875443,
0.08800409734249115,
0.01536658126860857,
0.12391536682844162,
-0.12161461263895035,
0.06136205047369003,
0.08653998374938965,
-0.295506089925766,
-0.0528949499130249,
0.14274926483631134,
-0.07876225560903549,
-0.0305787343531847,
-0.017320040613412857,
0.048304859548807144,
0.03496970236301422,
0.027385706081986427,
0.04183938354253769,
-0.03647675737738609,
-0.092762790620327,
0.01751532219350338,
-0.143564835190773,
-0.039993029087781906,
0.18976126611232758,
0.011732286773622036,
-0.00034348006010986865,
-0.07715122401714325,
-0.05376341938972473,
-0.04101463779807091,
-0.042937010526657104,
-0.000846695271320641,
-0.02613380365073681,
0.05616237595677376,
-0.01642494648694992,
-0.1293674111366272,
-0.09120573848485947,
-0.11039634793996811,
-0.00012677549966610968,
0.04178335517644882,
0.05969587340950966,
0.041712258011102676,
-0.04044710099697113,
0.18825960159301758,
-0.0923549085855484,
-0.012525082565844059,
-0.034800030291080475,
-0.0641256794333458,
0.010760520584881306,
-0.013014553114771843,
-0.03961050510406494,
0.07406070083379745,
0.00839644018560648,
0.15146765112876892,
0.004996788688004017,
0.006101059727370739,
0.10684829205274582,
0.06107981130480766,
-0.02804238349199295,
0.026658691465854645,
-0.026822425425052643,
-0.0703364834189415,
0.11735497415065765,
-0.03240197151899338,
0.05079909786581993,
-0.026075314730405807,
-0.09262683242559433,
-0.027889713644981384,
0.02119981311261654,
0.08634694665670395,
0.023926151916384697,
0.059125129133462906,
-0.037288468331098557,
-0.038913458585739136,
0.12092290818691254,
-0.05776609852910042,
0.007263923063874245,
0.00017538534302730113,
-0.06793726235628128,
-0.005605736281722784,
0.10849230736494064,
0.0008625762420706451,
-0.10399536788463593,
-0.14853455126285553,
-0.049890220165252686,
-0.027439389377832413,
-0.0994754210114479,
-0.00008899954264052212,
0.018567068502306938,
0.01303956750780344,
0.036912448704242706,
-0.16239508986473083,
-0.3544772267341614,
0.03558993339538574,
0.05524998530745506,
-0.03031240776181221,
-0.1153525859117508,
-0.11955811828374863,
-0.01607835292816162,
-0.015866069123148918,
-0.039116643369197845,
0.04534396901726723,
-0.05286429822444916,
0.03848632797598839,
0.04915305972099304,
0.0681101530790329,
-0.04926922172307968,
0.0073811933398246765,
-0.05582886189222336,
-0.009362313896417618,
-0.004354939330369234,
0.13969115912914276,
-0.061427533626556396,
0.009455678053200245,
-0.06536294519901276,
-0.06912102550268173,
-0.029013706371188164,
0.03224317356944084,
0.05739692971110344,
0.14503036439418793,
-0.10670234262943268,
-0.03707148879766464,
0.15233762562274933,
-0.06226981431245804,
-0.07562940567731857,
0.12164218723773956,
-0.031433604657649994,
0.10812370479106903,
0.12865273654460907,
0.09112289547920227,
0.1635788083076477,
-0.042950235307216644,
0.03323819860816002,
0.08360477536916733,
-0.06463772058486938,
-0.21579186618328094,
0.02331240475177765,
0.11055578291416168,
-0.17571613192558289,
0.05494578927755356,
-0.050602663308382034,
0.11857540905475616,
-0.07550137490034103,
-0.08476828038692474,
-0.02411111630499363,
-0.12976866960525513,
0.06126774847507477,
0.025462862104177475,
0.11140669137239456,
-0.02887853793799877,
-0.12219666689634323,
0.010996625758707523,
0.16827164590358734,
-0.06906164437532425,
0.02165532298386097,
-0.10164793580770493,
0.09746047854423523,
-0.08626008033752441,
0.007350281812250614,
-0.09909725189208984,
-0.08282872289419174,
-0.028578495606780052,
0.10285451263189316,
0.07751921564340591,
0.08480729162693024,
0.04511810839176178,
0.08423139899969101,
-0.043248292058706284,
-0.00873502530157566,
-0.026386430487036705,
-0.0047827837988734245,
-0.09146333485841751,
-0.05252756550908089,
-0.01769215054810047,
-0.02449627034366131,
0.07888699322938919,
-0.2186194807291031,
0.04572613537311554,
-0.034981220960617065,
0.04108857735991478,
-0.024127092212438583,
0.0015260953223332763,
0.02529289573431015,
0.013515438884496689,
-0.003359649796038866,
-0.0538809560239315,
0.1159137412905693,
0.039219893515110016,
-0.062070392072200775,
-0.022739507257938385,
-0.04082249850034714,
0.0035665163304656744,
0.1293504238128662,
-0.11924862116575241,
-0.041553959250450134,
-0.0030060301069170237,
-0.06317827850580215,
0.00930340401828289,
-0.04568514600396156,
0.0670476108789444,
0.22309255599975586,
-0.020445646718144417,
0.10804010927677155,
-0.029000531882047653,
-0.03519536182284355,
-0.04574142396450043,
-0.05569860711693764,
0.03200317919254303,
0.08957988768815994,
0.17781515419483185,
-0.05442127212882042,
0.06084977090358734,
0.034938596189022064,
-0.07632432132959366,
0.12798967957496643,
0.06423474103212357,
-0.05611404404044151,
0.011686763726174831,
-0.01694096066057682,
0.008461285382509232,
0.058605894446372986,
-0.06827184557914734,
0.01213858649134636,
0.030706994235515594,
0.01226990669965744,
0.12319086492061615,
-0.20405301451683044,
-0.03787337616086006,
-0.006453593261539936,
-0.055660180747509,
0.0008491809712722898,
0.021966977044939995,
-0.08976709097623825,
0.06473104655742645,
-0.043774839490652084,
-0.06221500784158707,
0.07756567001342773,
0.022765275090932846,
-0.09003060311079025,
0.16666269302368164,
-0.057311974465847015,
-0.24604272842407227,
-0.10083319991827011,
0.015233728103339672,
-0.032215043902397156,
0.03211892768740654,
0.04848437011241913,
-0.07601536065340042,
-0.07450302690267563,
-0.05952715501189232,
-0.010241393931210041,
0.027838338166475296,
-0.03310801461338997,
0.04730159044265747,
0.023572875186800957,
-0.04605846479535103,
-0.1334214061498642,
0.0012132246047258377,
-0.020828645676374435,
-0.1425030678510666,
0.08760198205709457,
-0.08974828571081161,
0.009656747803092003,
0.18959163129329681,
0.01552735548466444,
0.039457980543375015,
-0.01165578979998827,
0.22713419795036316,
-0.041464243084192276,
0.03575502336025238,
0.19044475257396698,
0.0765465795993805,
0.02615622617304325,
-0.03225398808717728,
0.04021300747990608,
-0.12956055998802185,
0.04962411895394325,
-0.0013416968286037445,
-0.07397410273551941,
-0.175836443901062,
-0.11098568141460419,
-0.06896628439426422,
0.048111360520124435,
0.07036267220973969,
0.06401453167200089,
0.03720380738377571,
0.10875357687473297,
0.021193871274590492,
0.09422101080417633,
0.013149040751159191,
0.05868198350071907,
0.1605682671070099,
-0.0433538518846035,
0.05976060777902603,
-0.08157678693532944,
-0.022816989570856094,
0.11091335117816925,
0.10707798600196838,
0.1355886608362198,
-0.051845647394657135,
0.09167703241109848,
0.01083496492356062,
0.14595729112625122,
0.0635591372847557,
0.07478782534599304,
-0.02937840297818184,
0.002615320961922407,
-0.02034470997750759,
0.00542715098708868,
-0.10866311192512512,
0.054224081337451935,
-0.015960562974214554,
-0.08816088736057281,
-0.08167587965726852,
0.052657850086688995,
0.03194233030080795,
0.07013637572526932,
0.07590515911579132,
-0.31159040331840515,
-0.10136226564645767,
0.04752838611602783,
-0.020360194146633148,
-0.021989542990922928,
0.0814998671412468,
0.010127201676368713,
-0.09267614781856537,
-0.021840794011950493,
-0.013271115720272064,
0.10843140631914139,
-0.15139725804328918,
-0.029665032401680946,
-0.03656427934765816,
0.08797502517700195,
0.021099332720041275,
0.1440536230802536,
-0.14955414831638336,
0.12717872858047485,
-0.008118199184536934,
0.10992931574583054,
-0.07631435990333557,
-0.012521875090897083,
0.06917708367109299,
0.08025534451007843,
0.11295287311077118,
0.00552758015692234,
-0.06877158582210541,
-0.09494490176439285,
-0.15705381333827972,
0.04113904759287834,
-0.07424518465995789,
0.0035933549515902996,
0.014327257871627808,
-0.02561458759009838,
-0.0007397675653919578,
0.019437460228800774,
-0.08899202197790146,
-0.1635560691356659,
-0.1216951236128807,
0.023893972858786583,
0.09232233464717865,
0.0021687988191843033,
-0.04733375459909439,
-0.0865333303809166,
-0.049748487770557404,
0.24970370531082153,
0.022842157632112503,
-0.09259781986474991,
-0.1281932145357132,
0.06035831943154335,
0.051440250128507614,
-0.08739326894283295,
0.02042066678404808,
0.06651179492473602,
0.07347498089075089,
-0.01577560417354107,
-0.0989646390080452,
0.06193425878882408,
-0.09528254717588425,
-0.07589806616306305,
0.02145569957792759,
0.07931282371282578,
0.06611892580986023,
0.039060670882463455,
0.05578159540891647,
-0.03386982902884483,
-0.03525581583380699,
-0.13603746891021729,
-0.01858317106962204,
0.09412410855293274,
-0.030413195490837097,
-0.01779644750058651,
-0.03986440598964691,
0.07986202836036682,
-0.01025239285081625,
0.0039043838623911142,
0.23233896493911743,
0.1853552609682083,
-0.053598761558532715,
0.07314502447843552,
0.10411316901445389,
-0.05963386967778206,
-0.26261842250823975,
-0.031609997153282166,
-0.04680847004055977,
-0.0012933907564729452,
-0.05818760395050049,
-0.22184285521507263,
0.09912769496440887,
0.034194111824035645,
-0.03577110171318054,
0.0990070030093193,
-0.2718280255794525,
-0.08843522518873215,
0.17495115101337433,
0.1486770510673523,
0.22900959849357605,
-0.0471406951546669,
-0.0021137879230082035,
-0.023952124640345573,
-0.12370415031909943,
0.214342400431633,
-0.11590161174535751,
0.12229540199041367,
-0.03385784476995468,
0.11424775421619415,
0.004057285841554403,
-0.0396537147462368,
0.0838361606001854,
-0.0009114817949011922,
0.03274012356996536,
-0.057468004524707794,
0.09727304428815842,
-0.000905330409295857,
-0.05461578071117401,
0.10147292912006378,
0.01381720881909132,
0.06558925658464432,
-0.16434158384799957,
-0.08959437906742096,
-0.02747551165521145,
0.04610976576805115,
0.024113882333040237,
-0.09107006341218948,
0.017417823895812035,
0.04667112976312637,
0.01908896677196026,
0.025435369461774826,
-0.006256130523979664,
0.012519816868007183,
-0.027913326397538185,
0.05731775611639023,
0.1268356293439865,
-0.07383368909358978,
-0.026698078960180283,
-0.043098606169223785,
-0.028860345482826233,
0.05231662467122078,
-0.1834012269973755,
0.013173658400774002,
0.07646076381206512,
-0.0031698301900178194,
0.059031885117292404,
0.06801261007785797,
-0.023016581311821938,
0.04284531623125076,
0.07494297623634338,
-0.12535905838012695,
-0.07248686999082565,
-0.08792129904031754,
-0.08725697547197342,
0.03010191209614277,
0.056072577834129333,
0.13208721578121185,
-0.11339233815670013,
-0.039591096341609955,
-0.019317032769322395,
0.02643583156168461,
-0.08251671493053436,
0.10182875394821167,
0.08399292081594467,
-0.013171015307307243,
-0.07352455705404282,
0.10758502781391144,
-0.002246301854029298,
0.0032744521740823984,
0.03013717383146286,
0.12739092111587524,
-0.09745761007070541,
-0.14476792514324188,
-0.008086178451776505,
0.08919638395309448,
-0.13757435977458954,
-0.06592113524675369,
-0.07043468207120895,
0.003333350410684943,
0.07194946706295013,
-0.0737123042345047,
0.06284181773662567,
0.004361277911812067,
-0.07634104043245316,
0.021920200437307358,
-0.11447370797395706,
0.031342100352048874,
0.09396853297948837,
0.009137583896517754,
-0.09334192425012589,
0.10842757672071457,
0.0014984358567744493,
0.09735378623008728,
-0.06415073573589325,
-0.0267147496342659,
-0.09900958836078644,
0.05459292232990265,
-0.11269081383943558,
-0.00788978673517704,
-0.09769269824028015,
0.05042320489883423,
-0.024159379303455353,
-0.0021472664084285498,
-0.04826706275343895,
0.06969045847654343,
-0.09788177907466888,
0.030043398961424828,
-0.015454460866749287,
0.02132292650640011,
-0.07207503914833069,
0.010597465559840202,
0.02977696806192398,
-0.0341954305768013,
0.11767048388719559,
0.0044917333871126175,
-0.01891392469406128,
0.130840465426445,
-0.11961154639720917,
-0.0014505416620522738,
0.038481205701828,
0.036012545228004456,
0.02633046731352806,
-0.07206889986991882,
0.05226386338472366,
0.030802618712186813,
0.03673606738448143,
-0.010201843455433846,
0.0645376443862915,
-0.08598767966032028,
-0.05750871077179909,
-0.0645635575056076,
-0.0837424173951149,
-0.06620124727487564,
0.024438444525003433,
0.043390605598688126,
0.05415787175297737,
0.08948754519224167,
-0.06809142976999283,
0.03374688699841499,
-0.10639879107475281,
0.010518966242671013,
0.011273495852947235,
-0.0478108786046505,
-0.05237269774079323,
-0.051379457116127014,
0.06722673773765564,
-0.02486869879066944,
0.17205709218978882,
-0.029603200033307076,
-0.034458328038454056,
-0.03868711739778519,
-0.03921687602996826,
0.06310227513313293,
-0.007531906943768263,
0.24843263626098633,
0.06681493669748306,
0.04355272278189659,
-0.011447550728917122,
0.10605673491954803,
0.0854787826538086,
0.02344701811671257,
0.12653282284736633,
-0.02966386452317238,
-0.003726196475327015,
0.1306139975786209,
-0.05879681557416916,
-0.04501214995980263,
-0.11275424063205719,
0.040506236255168915,
-0.03795671463012695,
0.025857890024781227,
-0.07874306291341782,
0.024522781372070312,
0.12194851785898209,
-0.09947516024112701,
0.007736282888799906,
0.014327676966786385,
-0.10691218823194504,
-0.1568574458360672,
-0.2607615888118744,
-0.06533709168434143,
-0.14725203812122345,
0.03636257350444794,
-0.10657334327697754,
-0.06792441010475159,
0.018083101138472557,
0.054757069796323776,
-0.04394230619072914,
0.05345536023378372,
-0.004658762365579605,
0.0433029867708683,
0.020415546372532845,
-0.0494832880795002,
-0.004227690398693085,
-0.06712749600410461,
-0.031145473942160606,
-0.05597594380378723,
0.0202155951410532,
0.06064596772193909,
-0.017416706308722496,
-0.04666300490498543,
0.08007555454969406,
0.023523204028606415,
-0.02629411593079567,
-0.09346570074558258,
0.03179419785737991,
0.04600028321146965,
0.08908167481422424,
0.041009776294231415,
-0.06050078943371773,
0.048145633190870285,
0.21287848055362701,
-0.05775148794054985,
-0.12561729550361633,
-0.127217099070549,
0.2416442483663559,
-0.00978502631187439,
-0.013219807296991348,
0.027458658441901207,
-0.03498624637722969,
0.02860933542251587,
0.28922611474990845,
0.24291108548641205,
-0.0829099491238594,
-0.014473389834165573,
0.0009781216504052281,
0.0008396159973926842,
-0.006791642401367426,
0.15321709215641022,
0.0782504603266716,
0.15656043589115143,
-0.10523808002471924,
-0.10797826200723648,
-0.016964416950941086,
-0.030974391847848892,
-0.05636446923017502,
0.06262882053852081,
0.016310537233948708,
-0.011049952358007431,
-0.03834117203950882,
0.08892073482275009,
-0.1910204142332077,
0.056185033172369,
-0.021389560773968697,
-0.013270189054310322,
-0.11197853088378906,
0.007005525752902031,
-0.030347833409905434,
-0.020842406898736954,
0.08641301095485687,
-0.04922843351960182,
0.030215036123991013,
0.13491462171077728,
-0.0032929989974945784,
-0.19540046155452728,
-0.0016591314924880862,
0.0529966726899147,
0.14283856749534607,
0.20821422338485718,
-0.030042214319109917,
0.08445239067077637,
0.09253024309873581,
-0.020792247727513313,
-0.13915537297725677,
0.10000631213188171,
-0.009596080519258976,
-0.04406045749783516,
0.04887938126921654,
0.018857331946492195,
-0.052712880074977875,
0.03138147294521332,
0.03104092739522457,
-0.06642540544271469,
-0.009379109367728233,
0.030320756137371063,
-0.003383998991921544,
-0.05369386821985245,
0.004771311767399311,
-0.07966477423906326,
0.12354959547519684,
0.0764390155673027,
-0.05670337378978729,
-0.03204035013914108,
-0.08805899322032928,
0.09086020290851593,
-0.021331246942281723,
-0.009360164403915405,
-0.012149090878665447,
-0.11661256849765778,
-0.05388224124908447,
0.02707969956099987,
0.0135676059871912,
-0.1650942862033844,
-0.05617466941475868,
-0.09321825951337814,
-0.048737939447164536,
-0.11582915484905243,
0.14123041927814484,
0.12661714851856232,
0.010999259538948536,
-0.025978004559874535,
-0.04121606796979904,
-0.07818488776683807,
0.03840752691030502,
-0.12272489815950394,
-0.12742629647254944
] |
null | null |
transformers
|
### Model information
Fine-tuning data 1: https://www.kaggle.com/andradaolteanu/rickmorty-scripts
Base model: e-tony/gpt2-rnm
Epoch: 3
Train runtime: 7.1779 secs
Loss: 2.5694
Training notebook: [Colab](https://colab.research.google.com/drive/12NvO1SIZevF8ybJqfN9O21I3i9bU1dOO#scrollTo=KUsyn02WWmf5)
### ===Teachable NLP=== ###
Training a GPT-2 model normally requires writing code and GPU resources, but here you can easily fine-tune a model and get an API to use it for free.
Teachable NLP: [Teachable NLP](https://ainize.ai/teachable-nlp)
Tutorial: [Tutorial](https://forum.ainetwork.ai/t/teachable-nlp-how-to-use-teachable-nlp/65?utm_source=community&utm_medium=huggingface&utm_campaign=model&utm_content=teachable%20nlp)
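As with the only-Rick variant, no usage snippet is included in the card; below is a minimal sampling sketch that loads the GPT-2 classes directly instead of going through the pipeline, again assuming only the standard `transformers` API.

```python
# Minimal sketch (not from the original card): sample a continuation from the
# season-1 fine-tune with top-k / nucleus sampling.
import torch
from transformers import GPT2LMHeadModel, GPT2Tokenizer

tokenizer = GPT2Tokenizer.from_pretrained("ainize/gpt2-rnm-with-season-1")
model = GPT2LMHeadModel.from_pretrained("ainize/gpt2-rnm-with-season-1")
model.eval()

input_ids = tokenizer.encode("Morty: Aw geez, Rick,", return_tensors="pt")
with torch.no_grad():
    output_ids = model.generate(
        input_ids,
        max_length=60,
        do_sample=True,
        top_k=50,
        top_p=0.95,
    )
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```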
|
{}
|
text-generation
|
ainize/gpt2-rnm-with-season-1
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
### Model information
Fine tuning data 1: URL
Base model: e-tony/gpt2-rnm
Epoch: 3
Train runtime: 7.1779 secs
Loss: 2.5694
Training notebook: Colab
### ===Teachable NLP=== ###
Training a GPT-2 model normally requires writing code and GPU resources, but here you can easily fine-tune a model and get an API to use it for free.
Teachable NLP: Teachable NLP
Tutorial: Tutorial
|
[
"### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 3\n Train runtime: 7.1779 secs\n Loss: 2.5694\n \n\n\nTraining notebook: Colab",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
"TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 3\n Train runtime: 7.1779 secs\n Loss: 2.5694\n \n\n\nTraining notebook: Colab",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
50,
47,
57
] |
[
"passage: TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Model information\n \n Fine tuning data 1: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 3\n Train runtime: 7.1779 secs\n Loss: 2.5694\n \n\n\nTraining notebook: Colab### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
-0.13302038609981537,
0.20478567481040955,
-0.0006092428811825812,
0.140052929520607,
0.09786385297775269,
0.05708272382616997,
0.09847220033407211,
0.12913072109222412,
0.0011177189880982041,
-0.06587701290845871,
0.1324997842311859,
0.16185776889324188,
-0.0036984689068049192,
0.16308486461639404,
0.015083570964634418,
-0.31077489256858826,
-0.013927235268056393,
0.04832370951771736,
0.024127980694174767,
0.10054370015859604,
0.08636026084423065,
-0.024551715701818466,
0.059639398008584976,
-0.004698106087744236,
-0.12108222395181656,
-0.02081417106091976,
0.000755412969738245,
-0.08916213363409042,
0.1293046772480011,
0.052706677466630936,
0.03147324547171593,
0.01460774801671505,
0.06695332378149033,
-0.15723691880702972,
0.029735468327999115,
0.00620281184092164,
-0.0455050989985466,
0.10456524789333344,
0.0235162153840065,
0.012497246265411377,
0.21607764065265656,
0.04135656729340553,
0.0168142206966877,
0.041343387216329575,
-0.10376550257205963,
-0.07445579022169113,
-0.0346822664141655,
0.0982714518904686,
0.11689340323209763,
0.09462118148803711,
0.016768772155046463,
0.12037608027458191,
-0.14670272171497345,
0.06316465884447098,
0.0902969092130661,
-0.30096128582954407,
-0.05372883379459381,
0.15033912658691406,
-0.09005623310804367,
-0.02299191802740097,
-0.03166322410106659,
0.04801608994603157,
0.037414539605379105,
0.013154910877346992,
0.04097890481352806,
-0.030886756256222725,
-0.09701458364725113,
0.02071317844092846,
-0.15063007175922394,
-0.033702317625284195,
0.18283294141292572,
0.0031963821966201067,
0.005468443036079407,
-0.08378836512565613,
-0.052374135702848434,
-0.06707918643951416,
-0.032451801002025604,
-0.007651438470929861,
-0.035494085401296616,
0.05744442716240883,
-0.033475782722234726,
-0.12067914009094238,
-0.08657791465520859,
-0.11754721403121948,
0.00029028410790488124,
0.03582371771335602,
0.0630950778722763,
0.04180407151579857,
-0.03328075259923935,
0.18700972199440002,
-0.10325828939676285,
-0.012356042861938477,
-0.02879147045314312,
-0.07452791929244995,
0.005617405753582716,
-0.008651562966406345,
-0.04202321544289589,
0.061660315841436386,
-0.0037803680170327425,
0.14642202854156494,
-0.000006758863037248375,
0.010209177620708942,
0.10263846069574356,
0.06486713141202927,
-0.03381899371743202,
0.019734643399715424,
-0.028594065457582474,
-0.06791458278894424,
0.10625652223825455,
-0.029914071783423424,
0.044308461248874664,
-0.03502097725868225,
-0.09473338723182678,
-0.025286570191383362,
0.021755609661340714,
0.08625094592571259,
0.009502343833446503,
0.05889207497239113,
-0.03921443596482277,
-0.04268663376569748,
0.12455271929502487,
-0.05406608805060387,
0.011051636189222336,
-0.0033246695529669523,
-0.07855729758739471,
-0.02481955848634243,
0.09485570341348648,
-0.0022180976811796427,
-0.09180428832769394,
-0.13595615327358246,
-0.05832860246300697,
-0.028417596593499184,
-0.10911985486745834,
0.004614328965544701,
0.006944499909877777,
0.005928505212068558,
0.024969590827822685,
-0.15565231442451477,
-0.37052640318870544,
0.03864838927984238,
0.04559046030044556,
-0.03528589382767677,
-0.10757292062044144,
-0.11976569145917892,
-0.015257319435477257,
-0.015428971499204636,
-0.03419654816389084,
0.04213128238916397,
-0.05125681310892105,
0.037651680409908295,
0.042838502675294876,
0.06726372241973877,
-0.052152883261442184,
0.007742029149085283,
-0.060151901096105576,
-0.01621553674340248,
-0.0053636981174349785,
0.13919371366500854,
-0.06862000375986099,
0.0035895060282200575,
-0.04715308919548988,
-0.07669644802808762,
-0.037113141268491745,
0.025596531108021736,
0.05691499635577202,
0.1283714324235916,
-0.11140137165784836,
-0.035246554762125015,
0.1478944569826126,
-0.07262850552797318,
-0.06215274706482887,
0.12675413489341736,
-0.037315309047698975,
0.10503455251455307,
0.1249445304274559,
0.10363245755434036,
0.18378221988677979,
-0.02861703932285309,
0.03143429756164551,
0.06713301688432693,
-0.07357537001371384,
-0.2135411947965622,
0.01755649782717228,
0.1221158429980278,
-0.18091297149658203,
0.06526879221200943,
-0.00980419758707285,
0.11128895729780197,
-0.07955333590507507,
-0.0821128711104393,
-0.026840252801775932,
-0.13421612977981567,
0.07890225201845169,
0.033797990530729294,
0.11980881541967392,
-0.03869776055216789,
-0.11590391397476196,
0.01486737746745348,
0.16827477514743805,
-0.057734616100788116,
0.014532440342009068,
-0.10385041683912277,
0.09474223107099533,
-0.06988802552223206,
0.0002936397213488817,
-0.09585326164960861,
-0.08638610690832138,
-0.021011343225836754,
0.1160712018609047,
0.0792931467294693,
0.11194074898958206,
0.04735385626554489,
0.08830496668815613,
-0.053382400423288345,
-0.008294638246297836,
-0.02097834274172783,
-0.006143450271338224,
-0.09504387527704239,
-0.056519124656915665,
-0.00596024002879858,
-0.02207268215715885,
0.08604541420936584,
-0.23076221346855164,
0.050161685794591904,
-0.031594373285770416,
0.04360376298427582,
-0.014533644542098045,
0.001583222532644868,
0.020193055272102356,
0.02555583231151104,
-0.0023635474499315023,
-0.050153568387031555,
0.11484529823064804,
0.03637375682592392,
-0.05176819488406181,
-0.02197963558137417,
-0.05099831894040108,
0.019306959584355354,
0.11985477805137634,
-0.10398364067077637,
-0.054196394979953766,
-0.003729442832991481,
-0.06917234510183334,
0.000809794757515192,
-0.05473921075463295,
0.0676274299621582,
0.20136535167694092,
-0.03325178846716881,
0.10562842339277267,
-0.026564184576272964,
-0.03128733113408089,
-0.04992525652050972,
-0.050663694739341736,
0.03732971474528313,
0.08734668791294098,
0.1572892814874649,
-0.06365294009447098,
0.06874320656061172,
0.02380250208079815,
-0.08030369877815247,
0.1495838165283203,
0.07453198730945587,
-0.056509025394916534,
0.008264623582363129,
-0.020501382648944855,
0.009080922231078148,
0.06421265006065369,
-0.047664057463407516,
0.020630469545722008,
0.031328070908784866,
0.02690008096396923,
0.12405002117156982,
-0.2063409388065338,
-0.04580172896385193,
-0.016549695283174515,
-0.045798640698194504,
0.009155998937785625,
0.03289942070841789,
-0.0834134966135025,
0.06695664674043655,
-0.051758237183094025,
-0.06663478910923004,
0.08456642925739288,
0.02951403334736824,
-0.07747402042150497,
0.1571410894393921,
-0.059989601373672485,
-0.25340092182159424,
-0.11175797134637833,
0.016889341175556183,
-0.04963282123208046,
0.03358776122331619,
0.057099517434835434,
-0.07615741342306137,
-0.0681174024939537,
-0.06308268010616302,
-0.02466142363846302,
0.021192872896790504,
-0.028365569189190865,
0.03675679862499237,
0.02434445358812809,
-0.0483715683221817,
-0.13611236214637756,
0.009132408536970615,
-0.025244830176234245,
-0.14620743691921234,
0.09902948141098022,
-0.09103069454431534,
-0.0004123282269574702,
0.18899552524089813,
0.013014405034482479,
0.038388144224882126,
-0.02295788750052452,
0.2102949023246765,
-0.0368245504796505,
0.02956581674516201,
0.18542242050170898,
0.08259449154138565,
0.030628031119704247,
-0.02868153713643551,
0.03802166506648064,
-0.12760375440120697,
0.05932760611176491,
0.006950464099645615,
-0.08276236057281494,
-0.17604847252368927,
-0.10778484493494034,
-0.07051288336515427,
0.07283532619476318,
0.06540555506944656,
0.07047932595014572,
0.06772191822528839,
0.11488750576972961,
0.03033592738211155,
0.09436137229204178,
0.015500425361096859,
0.05914496257901192,
0.15874868631362915,
-0.04599592089653015,
0.07390839606523514,
-0.07974261790513992,
-0.01975145749747753,
0.11874905973672867,
0.08335956931114197,
0.1423369199037552,
-0.0686037614941597,
0.07715195417404175,
0.003322591772302985,
0.16198550164699554,
0.07582169026136398,
0.06810731440782547,
-0.029982663691043854,
-0.003220207756385207,
-0.02210468053817749,
0.008290970697999,
-0.1076454445719719,
0.05157467722892761,
-0.0023059716913849115,
-0.07718230783939362,
-0.0668601244688034,
0.06741321831941605,
0.03873557597398758,
0.06925976276397705,
0.0836152657866478,
-0.3118135929107666,
-0.09756845235824585,
0.03145534545183182,
-0.014536900445818901,
-0.023024262860417366,
0.08587287366390228,
0.013143785297870636,
-0.0934344083070755,
-0.0267484150826931,
-0.011129471473395824,
0.10155752301216125,
-0.14143230020999908,
-0.031014274805784225,
-0.039691220968961716,
0.09267135709524155,
0.013056498020887375,
0.15228863060474396,
-0.1540766805410385,
0.11819577217102051,
-0.008835997432470322,
0.10695324093103409,
-0.0769433081150055,
-0.021118225529789925,
0.06961209326982498,
0.07217936962842941,
0.09429758787155151,
0.005564098712056875,
-0.06473667174577713,
-0.09361301362514496,
-0.1676371693611145,
0.02097015269100666,
-0.06372013688087463,
0.008389512076973915,
0.0034475778229534626,
-0.028565270826220512,
0.00043224793625995517,
0.0212139543145895,
-0.09134585410356522,
-0.16292011737823486,
-0.12200427055358887,
0.036515358835458755,
0.10889061540365219,
0.011495755985379219,
-0.047769542783498764,
-0.09431412816047668,
-0.05009299889206886,
0.25782305002212524,
0.04933668673038483,
-0.08068257570266724,
-0.133065328001976,
0.06416799873113632,
0.05614951252937317,
-0.07805204391479492,
0.02201935090124607,
0.07193098962306976,
0.07537257671356201,
-0.008020135574042797,
-0.08253356069326401,
0.07001939415931702,
-0.08720856159925461,
-0.0950562134385109,
0.01389180589467287,
0.0811305120587349,
0.06725279986858368,
0.036739423871040344,
0.0474453866481781,
-0.03061968833208084,
-0.028682608157396317,
-0.13831999897956848,
-0.016156230121850967,
0.09356123954057693,
-0.03271664306521416,
-0.009313905611634254,
-0.04052186757326126,
0.08875962346792221,
-0.0025941201020032167,
-0.007892552763223648,
0.22814412415027618,
0.1984417885541916,
-0.059883225709199905,
0.08297468721866608,
0.1004326343536377,
-0.0523640401661396,
-0.2622963786125183,
-0.014581254683434963,
-0.04419100284576416,
-0.005617588758468628,
-0.05366102606058121,
-0.21453137695789337,
0.09027525782585144,
0.05308151990175247,
-0.03850824758410454,
0.1055871844291687,
-0.2721274197101593,
-0.09558536112308502,
0.1653931736946106,
0.13933996856212616,
0.24278387427330017,
-0.03916911780834198,
0.0029259177390486,
-0.027162576094269753,
-0.10329297184944153,
0.20425191521644592,
-0.1244460940361023,
0.13269232213497162,
-0.044811397790908813,
0.10654139518737793,
0.0012631110148504376,
-0.03852570429444313,
0.07640530169010162,
-0.006876063998788595,
0.0411294661462307,
-0.06237732246518135,
0.10396473854780197,
0.014038639143109322,
-0.06382913142442703,
0.09777837246656418,
0.019675366580486298,
0.07542058825492859,
-0.15709765255451202,
-0.08915513008832932,
-0.02349984645843506,
0.04115305840969086,
0.02399516850709915,
-0.0955924242734909,
0.004616410005837679,
0.0511893667280674,
0.0138248261064291,
0.017116298899054527,
-0.010680015198886395,
0.02217789925634861,
-0.026123519986867905,
0.06479445099830627,
0.1072646975517273,
-0.07815065234899521,
-0.020332520827651024,
-0.05199612304568291,
-0.026819532737135887,
0.05694970488548279,
-0.16313785314559937,
0.012257412075996399,
0.07089994847774506,
0.0001900818751892075,
0.06249053031206131,
0.07611657679080963,
-0.014698997139930725,
0.04593220353126526,
0.07734392583370209,
-0.12902802228927612,
-0.06480594724416733,
-0.08590014278888702,
-0.06237977743148804,
0.03654707595705986,
0.06544321775436401,
0.13113971054553986,
-0.11601760238409042,
-0.03249632194638252,
-0.009942468255758286,
0.0197319183498621,
-0.07027827948331833,
0.10633844137191772,
0.08107315003871918,
-0.01790810190141201,
-0.06837982684373856,
0.10988626629114151,
-0.0011138069676235318,
0.0016057172324508429,
0.0293473768979311,
0.12043166905641556,
-0.09703980386257172,
-0.1427212953567505,
-0.011829148046672344,
0.08088640123605728,
-0.13469985127449036,
-0.06588731706142426,
-0.06885910779237747,
0.01077608484774828,
0.07646554708480835,
-0.05797390267252922,
0.06063276529312134,
0.00396213261410594,
-0.08129966259002686,
0.027795014902949333,
-0.12064478546380997,
0.034474119544029236,
0.08660130947828293,
0.02203238196671009,
-0.10000311583280563,
0.09937814623117447,
-0.0006088179652579129,
0.08485566079616547,
-0.06867653131484985,
-0.03582516685128212,
-0.10762885212898254,
0.049756184220314026,
-0.09555062651634216,
-0.015403589233756065,
-0.09057608246803284,
0.05063349008560181,
-0.029127057641744614,
-0.004692590329796076,
-0.04543206840753555,
0.07076416164636612,
-0.09576660394668579,
0.03413168340921402,
-0.010668735951185226,
0.017642047256231308,
-0.077935591340065,
0.012673212215304375,
0.02588767744600773,
-0.028649836778640747,
0.11602446436882019,
0.00948732253164053,
-0.02068493887782097,
0.1284990906715393,
-0.10431855916976929,
0.0027497706469148397,
0.03647387772798538,
0.02732362039387226,
0.03516955301165581,
-0.07140370458364487,
0.046510182321071625,
0.026830093935132027,
0.038884107023477554,
-0.000027848656827700324,
0.05583570525050163,
-0.09099103510379791,
-0.0441710539162159,
-0.0634487122297287,
-0.09051259607076645,
-0.06381523609161377,
0.0217099878937006,
0.03597424179315567,
0.05349526181817055,
0.0933653712272644,
-0.07324559986591339,
0.02605818770825863,
-0.09924369305372238,
0.004666762892156839,
0.016769060865044594,
-0.04633253067731857,
-0.03299187123775482,
-0.044714491814374924,
0.06682466715574265,
-0.023823995143175125,
0.17940041422843933,
-0.017723672091960907,
-0.054412659257650375,
-0.04631724953651428,
-0.048090651631355286,
0.06603797525167465,
-0.009794504381716251,
0.2455793023109436,
0.07012271881103516,
0.04046690836548805,
-0.006888080388307571,
0.11031435430049896,
0.08679424971342087,
0.04107961058616638,
0.12659525871276855,
-0.0429358184337616,
-0.02132985182106495,
0.11649355292320251,
-0.0727577805519104,
-0.044302619993686676,
-0.11255639791488647,
0.03892486169934273,
-0.04776925966143608,
0.021926354616880417,
-0.08486363291740417,
0.02155090868473053,
0.12515860795974731,
-0.09256145358085632,
0.012128799222409725,
0.014670463278889656,
-0.1124114841222763,
-0.15390731394290924,
-0.2470005303621292,
-0.06924127787351608,
-0.15818405151367188,
0.0394463986158371,
-0.10422695428133011,
-0.0573636069893837,
0.028433429077267647,
0.06392864137887955,
-0.03579472005367279,
0.06324973702430725,
-0.01955341547727585,
0.06235779449343681,
0.007812871597707272,
-0.05641283094882965,
-0.006942216772586107,
-0.07356715202331543,
-0.029959971085190773,
-0.05866284668445587,
0.01162624079734087,
0.06116480007767677,
-0.025316674262285233,
-0.04222330078482628,
0.06449839472770691,
0.032325200736522675,
-0.01675049588084221,
-0.08582572638988495,
0.029535865411162376,
0.04425632581114769,
0.08060947805643082,
0.04238377884030342,
-0.06272422522306442,
0.04558814689517021,
0.21914075314998627,
-0.06160750240087509,
-0.12839262187480927,
-0.12495958060026169,
0.25083503127098083,
-0.0031718802638351917,
-0.011072127148509026,
0.01957869715988636,
-0.03785502165555954,
0.03395802900195122,
0.2729282081127167,
0.2496536374092102,
-0.08506213873624802,
-0.011676466092467308,
0.0011513595236465335,
-0.0037355157546699047,
-0.010769068263471127,
0.15188367664813995,
0.0679667666554451,
0.15360552072525024,
-0.09753495454788208,
-0.10446074604988098,
-0.01745983213186264,
-0.03585970774292946,
-0.05785008519887924,
0.07256170362234116,
0.013009654358029366,
-0.014428497292101383,
-0.046667151153087616,
0.07888436317443848,
-0.1913696527481079,
0.06876794248819351,
-0.02101137675344944,
-0.009075574576854706,
-0.11530895531177521,
0.011129739694297314,
-0.01803220994770527,
-0.0348605252802372,
0.08880604803562164,
-0.04826487973332405,
0.035040535032749176,
0.10239234566688538,
-0.006791426334530115,
-0.186259463429451,
-0.01218725647777319,
0.045629095286130905,
0.1611458957195282,
0.2069249004125595,
-0.039411574602127075,
0.09124501794576645,
0.0942990705370903,
-0.024035358801484108,
-0.13651280105113983,
0.10258447378873825,
-0.011915835551917553,
-0.06355725228786469,
0.03635374829173088,
0.03280331566929817,
-0.05285630375146866,
0.04596146196126938,
0.030927885323762894,
-0.06623498350381851,
-0.006865717004984617,
0.02090473100543022,
0.001183712505735457,
-0.05597315728664398,
0.004305257461965084,
-0.06804277002811432,
0.1282094568014145,
0.08236202597618103,
-0.05275361239910126,
-0.032768603414297104,
-0.08917754888534546,
0.08478132635354996,
-0.025694923475384712,
-0.023125143721699715,
-0.011943420395255089,
-0.12954576313495636,
-0.05381467565894127,
0.05743284150958061,
0.01317740697413683,
-0.17664280533790588,
-0.06321682780981064,
-0.09355682879686356,
-0.055816177278757095,
-0.10814537107944489,
0.1388307511806488,
0.12022309750318527,
0.014031367376446724,
-0.021405156701803207,
-0.05096546187996864,
-0.08253274857997894,
0.0346878282725811,
-0.1233765035867691,
-0.12465502321720123
] |
null | null |
transformers
|
### Model information
Fine tuning data 1: https://www.kaggle.com/andradaolteanu/rickmorty-scripts
Fine tuning data 2: https://www.kaggle.com/mikhailgaerlan/spongebob-squarepants-completed-transcripts
Base model: e-tony/gpt2-rnm
Epoch: 2
Train runtime: 790.0612 secs
Loss: 2.8569
API page: [Ainize](https://ainize.ai/fpem123/GPT2-Rick-N-Morty-with-SpongeBob?branch=master)
Demo page: [End-point](https://master-gpt2-rick-n-morty-with-sponge-bob-fpem123.endpoint.ainize.ai/)
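### Usage example
A minimal usage sketch (added here, not part of the original card), assuming the model id `ainize/gpt2-rnm-with-spongebob` from this record and the standard `transformers` text-generation pipeline:
```python
from transformers import pipeline

# Load the fine-tuned GPT-2 checkpoint; the model id is taken from this record's id field.
generator = pipeline("text-generation", model="ainize/gpt2-rnm-with-spongebob")

# The prompt below is purely illustrative.
prompt = "Rick: Morty, we gotta go to Bikini Bottom."
outputs = generator(prompt, max_length=100, do_sample=True, top_p=0.95, num_return_sequences=1)
print(outputs[0]["generated_text"])
```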
### ===Teachable NLP=== ###
Training a GPT-2 model normally means writing code and provisioning GPU resources, but here you can easily fine-tune the model and get an API to use it for free.
Teachable NLP: [Teachable NLP](https://ainize.ai/teachable-nlp)
Tutorial: [Tutorial](https://forum.ainetwork.ai/t/teachable-nlp-how-to-use-teachable-nlp/65?utm_source=community&utm_medium=huggingface&utm_campaign=model&utm_content=teachable%20nlp)
|
{}
|
text-generation
|
ainize/gpt2-rnm-with-spongebob
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
### Model information
Fine tuning data 1: URL
Fine tuning data 2: URL
Base model: e-tony/gpt2-rnm
Epoch: 2
Train runtime: 790.0612 secs
Loss: 2.8569
API page: Ainize
Demo page: End-point
### ===Teachable NLP=== ###
Training a GPT-2 model normally means writing code and provisioning GPU resources, but here you can easily fine-tune the model and get an API to use it for free.
Teachable NLP: Teachable NLP
Tutorial: Tutorial
|
[
"### Model information\n \n Fine tuning data 1: URL\n Fine tuning data 2: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 2\n Train runtime: 790.0612 secs\n Loss: 2.8569\n\nAPI page: Ainize\n\nDemo page: End-point",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
"TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Model information\n \n Fine tuning data 1: URL\n Fine tuning data 2: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 2\n Train runtime: 790.0612 secs\n Loss: 2.8569\n\nAPI page: Ainize\n\nDemo page: End-point",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
50,
62,
57
] |
[
"passage: TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Model information\n \n Fine tuning data 1: URL\n Fine tuning data 2: URL\n Base model: e-tony/gpt2-rnm\n Epoch: 2\n Train runtime: 790.0612 secs\n Loss: 2.8569\n\nAPI page: Ainize\n\nDemo page: End-point### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
-0.12498442083597183,
0.20969218015670776,
-0.002398393815383315,
0.12150934338569641,
0.10377044230699539,
0.07121280580759048,
0.12022189050912857,
0.13554801046848297,
0.05413036048412323,
-0.06288308650255203,
0.09507672488689423,
0.18699583411216736,
0.0339181125164032,
0.1955171674489975,
0.02185356616973877,
-0.3025093972682953,
-0.019247913733124733,
0.09004148095846176,
0.08202976733446121,
0.1003391370177269,
0.09342925250530243,
-0.03527689352631569,
0.06086300313472748,
0.02705412358045578,
-0.08476559072732925,
-0.017255954444408417,
0.01782749965786934,
-0.10178041458129883,
0.13002514839172363,
0.04543401673436165,
0.01953793503344059,
0.021791666746139526,
0.04069986566901207,
-0.1414741426706314,
0.032009728252887726,
0.0037593315355479717,
-0.049598850309848785,
0.12060333788394928,
0.043363213539123535,
-0.0019274622900411487,
0.20553052425384521,
0.05778755620121956,
0.00739554176107049,
0.054508108645677567,
-0.08252524584531784,
-0.15818700194358826,
-0.023069214075803757,
0.08675940334796906,
0.10762834548950195,
0.09464865922927856,
0.014299136586487293,
0.13571901619434357,
-0.08708593249320984,
0.054194118827581406,
0.12847846746444702,
-0.2896645665168762,
-0.04010237753391266,
0.10256926715373993,
-0.08392234146595001,
-0.026244845241308212,
-0.011979624629020691,
0.03651171922683716,
0.010501795448362827,
0.027776796370744705,
0.06263452023267746,
-0.03793160617351532,
-0.12907476723194122,
-0.012059787288308144,
-0.12657366693019867,
-0.026239339262247086,
0.18796661496162415,
0.005873274523764849,
-0.024768006056547165,
-0.08052165806293488,
-0.07805464416742325,
-0.011048784479498863,
-0.058515239506959915,
-0.0009748516604304314,
-0.022538205608725548,
0.04934903234243393,
0.013965725898742676,
-0.1562729775905609,
-0.09168660640716553,
-0.11585298925638199,
-0.017362836748361588,
0.06615782529115677,
0.07136426120996475,
0.045597851276397705,
-0.06269069015979767,
0.18004612624645233,
-0.12552547454833984,
-0.03567217290401459,
-0.04659354314208031,
-0.07529390603303909,
0.029383700340986252,
-0.0009483267203904688,
-0.02548019029200077,
0.0031573346350342035,
0.010208507999777794,
0.1609557569026947,
-0.010407193563878536,
0.006627772934734821,
0.0990784540772438,
0.06024583429098129,
-0.021695394068956375,
0.025317028164863586,
-0.04710984230041504,
-0.05918510630726814,
0.13116644322872162,
-0.03581838682293892,
0.06658539921045303,
-0.02894619107246399,
-0.09430379420518875,
-0.03395935148000717,
0.017772939056158066,
0.062203146517276764,
0.0430942103266716,
0.05410681664943695,
-0.04789010435342789,
-0.0674176961183548,
0.14137984812259674,
-0.0538092777132988,
-0.012292608618736267,
0.00025409937370568514,
-0.06149841472506523,
0.02974841743707657,
0.10504179447889328,
-0.004686404950916767,
-0.10833154618740082,
-0.12471753358840942,
-0.06089579686522484,
-0.017269885167479515,
-0.08564848452806473,
0.0008485247963108122,
0.008397652767598629,
0.014716245234012604,
0.02346254698932171,
-0.15612486004829407,
-0.32966139912605286,
0.04733501002192497,
0.07365041226148605,
-0.025590863078832626,
-0.10917326807975769,
-0.08381922543048859,
0.029237214475870132,
-0.014290928840637207,
-0.05674858018755913,
0.014346071518957615,
-0.04747920110821724,
0.04255732148885727,
0.06087684631347656,
0.08017101883888245,
-0.06119967997074127,
0.03234616667032242,
-0.077950619161129,
-0.030593890696763992,
-0.053386393934488297,
0.10282085090875626,
-0.03887900710105896,
-0.0115967383608222,
-0.061426401138305664,
-0.05053387209773064,
-0.014392065815627575,
0.00828453991562128,
0.058325428515672684,
0.13484229147434235,
-0.08717475086450577,
-0.04283517599105835,
0.1847023367881775,
-0.07413844019174576,
-0.10585365444421768,
0.12195055186748505,
-0.011433765292167664,
0.08982307463884354,
0.1191050186753273,
0.1059441864490509,
0.1433449536561966,
-0.06263446062803268,
0.033283982425928116,
0.09526893496513367,
-0.04684208706021309,
-0.21799400448799133,
0.04127787426114082,
0.07994475215673447,
-0.14360186457633972,
0.06531305611133575,
-0.034037135541439056,
0.1060483381152153,
-0.05629230663180351,
-0.07869377732276917,
-0.016823483631014824,
-0.12735582888126373,
0.08285945653915405,
0.04121987149119377,
0.10641990602016449,
-0.015266523696482182,
-0.12774421274662018,
-0.018572846427559853,
0.17292627692222595,
-0.06795194000005722,
0.015849141404032707,
-0.10499710589647293,
0.12305627763271332,
-0.09265639632940292,
0.02388812229037285,
-0.1088159829378128,
-0.08589427173137665,
-0.006845325231552124,
0.033008135855197906,
0.08154859393835068,
0.07008616626262665,
0.04191502183675766,
0.09484727680683136,
-0.027024466544389725,
-0.019108254462480545,
-0.02259790152311325,
-0.019523920491337776,
-0.08575277030467987,
-0.037315525114536285,
-0.01752619631588459,
-0.02194153144955635,
0.04990150034427643,
-0.1440754234790802,
0.04099144786596298,
-0.028436072170734406,
0.03444506227970123,
-0.004262234084308147,
-0.004178795497864485,
0.03840923681855202,
-0.01084952149540186,
-0.01534615270793438,
-0.027651548385620117,
0.10896144807338715,
0.02836436778306961,
-0.04085153713822365,
-0.01734190434217453,
-0.037141744047403336,
0.06341561675071716,
0.12580154836177826,
-0.11860686540603638,
-0.018963100388646126,
0.020233672112226486,
-0.05927114188671112,
0.022608108818531036,
-0.04290233552455902,
0.05886644124984741,
0.2123778611421585,
0.003675546497106552,
0.120445117354393,
-0.03696431219577789,
-0.01784334145486355,
-0.020188743248581886,
-0.06407888233661652,
0.04709219932556152,
0.06364849209785461,
0.18419182300567627,
-0.02455839514732361,
0.06960224360227585,
0.03453425318002701,
-0.07433837652206421,
0.11671583354473114,
0.07361689954996109,
-0.05201782286167145,
0.01520081888884306,
-0.03409150615334511,
0.007030153181403875,
0.06564037501811981,
-0.10101871937513351,
0.0255669467151165,
0.05790793150663376,
0.0024939319118857384,
0.11480626463890076,
-0.16861538589000702,
-0.02601187489926815,
-0.011331802234053612,
-0.05454784259200096,
0.0009841859573498368,
0.048194725066423416,
-0.0822635367512703,
0.07548011839389801,
-0.033102307468652725,
-0.03887590393424034,
0.06607811152935028,
0.02486157976090908,
-0.08039414137601852,
0.15505538880825043,
-0.04420708864927292,
-0.26368144154548645,
-0.09706156700849533,
-0.015302371233701706,
-0.05760355293750763,
0.03655700013041496,
0.07679231464862823,
-0.0709579810500145,
-0.07195001095533371,
-0.0372973270714283,
-0.020534629002213478,
0.0618075430393219,
-0.042039673775434494,
0.014183640480041504,
0.005702185910195112,
-0.028512317687273026,
-0.13151074945926666,
-0.0131751773878932,
-0.030258508399128914,
-0.13066433370113373,
0.09720469266176224,
-0.0636073425412178,
0.024080706760287285,
0.17674432694911957,
0.015789221972227097,
0.040504779666662216,
-0.00506019638851285,
0.22210721671581268,
-0.04742040857672691,
0.049875158816576004,
0.2572473883628845,
0.10114170610904694,
0.05225147306919098,
-0.023245805874466896,
0.03931272402405739,
-0.11175934225320816,
0.03749578446149826,
-0.012981769628822803,
-0.07080361247062683,
-0.18171636760234833,
-0.11273243278265,
-0.08099633455276489,
0.03169647604227066,
0.058304563164711,
0.05928526818752289,
0.0552108958363533,
0.1301840990781784,
0.0039827520959079266,
0.08747889846563339,
0.0047703697346150875,
0.07063373923301697,
0.16308166086673737,
-0.02509627491235733,
0.07150724530220032,
-0.0806998461484909,
-0.018720285966992378,
0.1296486258506775,
0.11105241626501083,
0.12328531593084335,
-0.06288363039493561,
0.08540290594100952,
0.013972895219922066,
0.13389164209365845,
0.06329793483018875,
0.08588734269142151,
-0.047909364104270935,
0.010394987650215626,
-0.040112730115652084,
-0.008962228894233704,
-0.11096681654453278,
0.04757574573159218,
-0.04296567663550377,
-0.09349971264600754,
-0.07365713268518448,
0.020511066541075706,
0.039133522659540176,
0.06892763823270798,
0.07592707872390747,
-0.3048999607563019,
-0.09443900734186172,
0.030519764870405197,
-0.017975913360714912,
-0.03802207484841347,
0.06102867051959038,
-0.0507408082485199,
-0.11629288643598557,
0.00048373587196692824,
-0.03226446732878685,
0.11232058703899384,
-0.15688225626945496,
-0.015219035558402538,
-0.021191248670220375,
0.10819076001644135,
0.033966660499572754,
0.1452753245830536,
-0.17922645807266235,
0.1062379777431488,
0.0010353804100304842,
0.11259599030017853,
-0.08359047770500183,
-0.0014566679019480944,
0.07110787183046341,
0.0706634446978569,
0.12441691756248474,
-0.007882675155997276,
-0.009004926308989525,
-0.0954490676522255,
-0.1412872076034546,
0.04970201104879379,
-0.07589045912027359,
-0.027789952233433723,
0.0032295621931552887,
-0.02917034924030304,
-0.011868063360452652,
-0.011198863387107849,
-0.06293611973524094,
-0.16317613422870636,
-0.12816821038722992,
0.04824117198586464,
0.09135208278894424,
0.011363635770976543,
-0.030981432646512985,
-0.068049356341362,
-0.044032178819179535,
0.23290298879146576,
0.028160743415355682,
-0.1337844580411911,
-0.1283842772245407,
0.021819258108735085,
0.03345043584704399,
-0.109274722635746,
0.029441945254802704,
0.023779455572366714,
0.08720983564853668,
-0.034966930747032166,
-0.12042924016714096,
0.0612337552011013,
-0.10148802399635315,
-0.07297541946172714,
0.015640104189515114,
0.05724592134356499,
0.04714226350188255,
0.03136675804853439,
0.04674697294831276,
-0.033431340008974075,
-0.05298694968223572,
-0.13630512356758118,
-0.024601027369499207,
0.12125525623559952,
-0.025726551190018654,
-0.02982916124165058,
-0.0393400713801384,
0.016773883253335953,
-0.02095353789627552,
0.026382634416222572,
0.22420811653137207,
0.19188375771045685,
-0.048893142491579056,
0.09329766035079956,
0.15566490590572357,
-0.04505334421992302,
-0.2592645287513733,
-0.05613866075873375,
-0.054709434509277344,
-0.024114973843097687,
-0.049794550985097885,
-0.2478247582912445,
0.10859128087759018,
0.03156787157058716,
-0.06441216170787811,
0.10262639820575714,
-0.2265574038028717,
-0.08959931880235672,
0.1645525097846985,
0.14623130857944489,
0.2450825721025467,
-0.05318981036543846,
-0.0073128510266542435,
-0.025247221812605858,
-0.17283213138580322,
0.20816665887832642,
-0.08742629736661911,
0.12795978784561157,
-0.06518323719501495,
0.11725964397192001,
0.0032105857972055674,
-0.041916169226169586,
0.04739672690629959,
-0.015068596228957176,
0.01834847219288349,
-0.05026565492153168,
0.1116001307964325,
0.03677763044834137,
-0.05258401483297348,
0.1403970718383789,
-0.007892802357673645,
0.06627804040908813,
-0.13640150427818298,
-0.1039978414773941,
-0.039603620767593384,
0.05454292893409729,
0.02919054590165615,
-0.1139780655503273,
0.011316802352666855,
0.023748427629470825,
0.0017109456239268184,
0.037237271666526794,
0.015375105664134026,
0.023648295551538467,
-0.012998376041650772,
0.06693243235349655,
0.11196138709783554,
-0.11213956028223038,
-0.08248703181743622,
-0.047807205468416214,
-0.03401222825050354,
0.07041425257921219,
-0.17155179381370544,
0.014732854440808296,
0.0751923993229866,
-0.012872696854174137,
0.03836192935705185,
0.06427790969610214,
-0.045351143926382065,
0.06622163951396942,
0.06925634294748306,
-0.14829736948013306,
-0.09267647564411163,
-0.09100734442472458,
-0.05032380670309067,
0.013746672309935093,
0.10260424762964249,
0.11993374675512314,
-0.08240965753793716,
-0.05642475560307503,
-0.017192788422107697,
0.018732815980911255,
-0.05840322747826576,
0.08375123143196106,
0.07951023429632187,
-0.0056824516505002975,
-0.08349302411079407,
0.08302091062068939,
0.011665446683764458,
-0.014477338641881943,
0.031863726675510406,
0.12241481989622116,
-0.11189458519220352,
-0.15595117211341858,
-0.04048886522650719,
0.07594656199216843,
-0.15266312658786774,
-0.05826786905527115,
-0.08594762533903122,
0.016324002295732498,
0.05393876135349274,
-0.08722920715808868,
0.07011999934911728,
-0.018286673352122307,
-0.05951301008462906,
0.011522790417075157,
-0.09986382722854614,
0.036344461143016815,
0.0645674467086792,
0.030150288715958595,
-0.10233311355113983,
0.048865530639886856,
0.019177444279193878,
0.10867650806903839,
-0.06368756294250488,
-0.037483833730220795,
-0.07782764732837677,
0.03873709589242935,
-0.11208464205265045,
-0.013919849880039692,
-0.1091308668255806,
0.04340476542711258,
-0.04065709188580513,
-0.01058181095868349,
-0.037820883095264435,
0.0733250081539154,
-0.09246247261762619,
0.021715259179472923,
-0.008344006724655628,
0.01199198979884386,
-0.09753721952438354,
0.03253699839115143,
0.0205861683934927,
-0.03432980924844742,
0.13794772326946259,
0.0017817418556660414,
-0.026548203080892563,
0.10909151285886765,
-0.108361154794693,
-0.00070787756703794,
0.01092476211488247,
0.050559986382722855,
0.04433932527899742,
-0.04046995937824249,
0.06612495332956314,
0.031259097158908844,
0.03368004038929939,
-0.02283337339758873,
0.09567169845104218,
-0.1001443937420845,
-0.01423618383705616,
-0.0522899366915226,
-0.0722380205988884,
-0.06504957377910614,
0.02659366838634014,
0.044297151267528534,
0.055000189691782,
0.09972420334815979,
-0.059455644339323044,
0.05890956521034241,
-0.11219898611307144,
0.014158081263303757,
0.009250449948012829,
-0.05797195062041283,
-0.04695428907871246,
-0.05973544344305992,
0.05130818113684654,
-0.025485318154096603,
0.16019265353679657,
0.0022892882116138935,
-0.02395383082330227,
-0.037635330110788345,
-0.020100833848118782,
0.07317640632390976,
-0.037869665771722794,
0.199877068400383,
0.05693088471889496,
0.03338779881596565,
-0.00989423505961895,
0.10888893902301788,
0.06672077625989914,
0.03300590440630913,
0.12161137163639069,
-0.013407177291810513,
-0.005695682484656572,
0.1202378049492836,
-0.05299680680036545,
-0.021811500191688538,
-0.18511822819709778,
-0.015388883650302887,
-0.03009146824479103,
0.038158219307661057,
-0.06438156217336655,
0.024919135496020317,
0.13971883058547974,
-0.07024365663528442,
0.0184809360653162,
0.042983345687389374,
-0.08570661395788193,
-0.16349007189273834,
-0.2655441462993622,
-0.07230968773365021,
-0.145964577794075,
0.029917120933532715,
-0.09208060055971146,
-0.06534426659345627,
0.023431941866874695,
0.03776923567056656,
-0.04392416402697563,
0.0863289088010788,
0.02623762935400009,
0.02555779740214348,
0.027211103588342667,
-0.041846953332424164,
-0.03902256861329079,
-0.03629045560956001,
-0.016349555924534798,
-0.06118420138955116,
0.026769455522298813,
0.06919547915458679,
-0.0036183688789606094,
-0.06752555817365646,
0.07125012576580048,
0.01340754795819521,
-0.03746981546282768,
-0.09262026846408844,
0.03780345991253853,
0.03514588251709938,
0.09825270622968674,
0.022055311128497124,
-0.049972616136074066,
0.03933890908956528,
0.20507417619228363,
-0.048103347420692444,
-0.08967503160238266,
-0.11858199536800385,
0.2990761399269104,
-0.0010672658681869507,
-0.015923382714390755,
0.026761464774608612,
-0.030602695420384407,
0.012765600346028805,
0.30093902349472046,
0.23772813379764557,
-0.06412672251462936,
-0.009127006866037846,
0.017194418236613274,
-0.002026948379352689,
-0.013256986625492573,
0.1664925217628479,
0.07382617145776749,
0.2287025898694992,
-0.09185412526130676,
-0.04765694588422775,
-0.008770313113927841,
-0.02850969508290291,
-0.05622714012861252,
0.07990091294050217,
0.01089893002063036,
-0.0014035683125257492,
-0.02954564243555069,
0.07473177462816238,
-0.18795236945152283,
0.09523425996303558,
-0.05368971452116966,
-0.005804263986647129,
-0.11068486422300339,
0.009446900337934494,
-0.029863713309168816,
-0.015412180684506893,
0.08740535378456116,
-0.03977559134364128,
0.018666964024305344,
0.1343824714422226,
0.0028258769307285547,
-0.18391665816307068,
-0.010130233131349087,
0.06812958419322968,
0.07476019114255905,
0.24759361147880554,
-0.029666416347026825,
0.07216818630695343,
0.09388969093561172,
-0.029913458973169327,
-0.1420985758304596,
0.11048822849988937,
-0.027340354397892952,
-0.04148832708597183,
0.056192364543676376,
0.008913393132388592,
-0.04819865524768829,
0.016462821513414383,
0.02860722690820694,
-0.08252791315317154,
-0.020835954695940018,
-0.011930130422115326,
0.00045399783994071186,
-0.04517076909542084,
0.023006992414593697,
-0.08269396424293518,
0.11159008741378784,
0.04848963022232056,
-0.05654281750321388,
-0.051470834761857986,
-0.09780862927436829,
0.10738617926836014,
0.0006346408044919372,
-0.020650438964366913,
-0.022403022274374962,
-0.12519940733909607,
-0.04636464640498161,
0.033723555505275726,
0.016286063939332962,
-0.1841776967048645,
-0.03195745497941971,
-0.09457018226385117,
-0.03721490874886513,
-0.12793509662151337,
0.09811519831418991,
0.1293574422597885,
0.0109232347458601,
-0.029214035719633102,
-0.02015531249344349,
-0.09208953380584717,
0.03429824486374855,
-0.14081770181655884,
-0.12590590119361877
] |
null | null |
transformers
|
### Model information
Fine tuning data: https://www.kaggle.com/mikhailgaerlan/spongebob-squarepants-completed-transcripts
License: CC-BY-SA
Base model: gpt-2 large
Epoch: 50
Train runtime: 14723.0716 secs
Loss: 0.0268
API page: [Ainize](https://ainize.ai/fpem123/GPT2-Spongebob?branch=master)
Demo page: [End-point](https://master-gpt2-spongebob-fpem123.endpoint.ainize.ai/)
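### Usage example
A minimal usage sketch (added here, not part of the original card), assuming the model id `ainize/gpt2-spongebob-script-large` from this record and the standard `transformers` causal-LM API; the decoding parameters are illustrative only:
```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ainize/gpt2-spongebob-script-large")
model = AutoModelForCausalLM.from_pretrained("ainize/gpt2-spongebob-script-large")

# Illustrative prompt in the style of the fine-tuning transcripts.
prompt = "SpongeBob: Good morning, Squidward!"
input_ids = tokenizer(prompt, return_tensors="pt").input_ids

# Sampled generation; pad_token_id is set to eos to silence the GPT-2 padding warning.
output_ids = model.generate(input_ids, max_length=120, do_sample=True,
                            top_k=50, top_p=0.95, pad_token_id=tokenizer.eos_token_id)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```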
### ===Teachable NLP=== ###
Training a GPT-2 model normally means writing code and provisioning GPU resources, but here you can easily fine-tune the model and get an API to use it for free.
Teachable NLP: [Teachable NLP](https://ainize.ai/teachable-nlp)
Tutorial: [Tutorial](https://forum.ainetwork.ai/t/teachable-nlp-how-to-use-teachable-nlp/65?utm_source=community&utm_medium=huggingface&utm_campaign=model&utm_content=teachable%20nlp)
|
{}
|
text-generation
|
ainize/gpt2-spongebob-script-large
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
### Model information
Fine tuning data: URL
License: CC-BY-SA
Base model: gpt-2 large
Epoch: 50
Train runtime: 14723.0716 secs
Loss: 0.0268
API page: Ainize
Demo page: End-point
### ===Teachable NLP=== ###
Training a GPT-2 model normally means writing code and provisioning GPU resources, but here you can easily fine-tune the model and get an API to use it for free.
Teachable NLP: Teachable NLP
Tutorial: Tutorial
|
[
"### Model information\n \n Fine tuning data: URL\n License: CC-BY-SA\n Base model: gpt-2 large \n Epoch: 50\n Train runtime: 14723.0716 secs\n Loss: 0.0268\n \n\nAPI page: Ainize\n\nDemo page: End-point",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
"TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n",
"### Model information\n \n Fine tuning data: URL\n License: CC-BY-SA\n Base model: gpt-2 large \n Epoch: 50\n Train runtime: 14723.0716 secs\n Loss: 0.0268\n \n\nAPI page: Ainize\n\nDemo page: End-point",
"### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
54,
56,
57
] |
[
"passage: TAGS\n#transformers #pytorch #jax #gpt2 #text-generation #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us \n### Model information\n \n Fine tuning data: URL\n License: CC-BY-SA\n Base model: gpt-2 large \n Epoch: 50\n Train runtime: 14723.0716 secs\n Loss: 0.0268\n \n\nAPI page: Ainize\n\nDemo page: End-point### ===Teachable NLP=== ###\n\nTo train a GPT-2 model, write code and require GPU resources, but can easily fine-tune and get an API to use the model here for free.\n\nTeachable NLP: Teachable NLP\n\nTutorial: Tutorial"
] |
[
-0.1378629058599472,
0.2038252204656601,
-0.0008680460159666836,
0.12060229480266571,
0.10165955871343613,
0.05870766192674637,
0.11824207007884979,
0.12358932197093964,
0.049751635640859604,
-0.040526267141103745,
0.13054457306861877,
0.16446717083454132,
0.033413276076316833,
0.19063635170459747,
0.02145451121032238,
-0.2948484420776367,
-0.004974799230694771,
0.09033903479576111,
0.057892631739377975,
0.10716024041175842,
0.0772363543510437,
-0.03640088811516762,
0.07547032833099365,
0.026127299293875694,
-0.11065492033958435,
-0.04321032017469406,
0.0259893499314785,
-0.11243601143360138,
0.11977322399616241,
0.02689126506447792,
0.011735539883375168,
0.024420611560344696,
0.04237738251686096,
-0.10473690927028656,
0.027947448194026947,
0.017174696549773216,
-0.03895621746778488,
0.13130439817905426,
0.04506296291947365,
0.026514288038015366,
0.24181875586509705,
0.026839373633265495,
-0.013173779472708702,
0.04414147883653641,
-0.10449407249689102,
-0.16264477372169495,
-0.029078062623739243,
0.0897546038031578,
0.09419617056846619,
0.08047101646661758,
0.018953587859869003,
0.11696126312017441,
-0.08720646053552628,
0.04752802103757858,
0.11637191474437714,
-0.32377856969833374,
-0.05353377386927605,
0.1537964791059494,
-0.05104774981737137,
-0.029430579394102097,
-0.0010382244363427162,
0.0637299194931984,
0.01628510095179081,
0.02162809483706951,
0.0826580747961998,
-0.04113087058067322,
-0.09841721504926682,
0.007254545111209154,
-0.1294182986021042,
-0.05052148178219795,
0.19881722331047058,
-0.008353887125849724,
-0.010611958801746368,
-0.09991201758384705,
-0.07263954728841782,
-0.02307933010160923,
-0.0688752830028534,
0.020227838307619095,
-0.024823550134897232,
0.05550317093729973,
0.011484644375741482,
-0.13515323400497437,
-0.08139078319072723,
-0.11742055416107178,
-0.024905975908041,
0.052366774529218674,
0.05642350763082504,
0.06530690938234329,
-0.05551725998520851,
0.1696510910987854,
-0.1553695946931839,
-0.036240946501493454,
-0.0421670563519001,
-0.09455756843090057,
0.03461138531565666,
0.008564581163227558,
-0.03688687086105347,
0.031324535608291626,
0.011186927556991577,
0.1212657019495964,
-0.01849243976175785,
-0.010035639628767967,
0.1047549620270729,
0.07442411035299301,
-0.026408379897475243,
0.04077725484967232,
-0.08135727792978287,
-0.057463858276605606,
0.123702272772789,
-0.02764405868947506,
0.06058061122894287,
-0.03433552384376526,
-0.10312855988740921,
-0.06222490593791008,
-0.006013966165482998,
0.05461792275309563,
0.0485173799097538,
0.08073757588863373,
-0.023532206192612648,
-0.051555901765823364,
0.1470097154378891,
-0.03775011748075485,
0.00465041771531105,
-0.0029797623865306377,
-0.04161415621638298,
0.030617043375968933,
0.10936237871646881,
-0.02268253080546856,
-0.0949583649635315,
-0.1356387883424759,
-0.06780678033828735,
-0.018484599888324738,
-0.10866200923919678,
-0.005410592071712017,
0.006800562608987093,
0.03637386113405228,
0.015854481607675552,
-0.1512114256620407,
-0.32612091302871704,
0.05190745368599892,
0.08112993836402893,
-0.021264487877488136,
-0.11679600924253464,
-0.046940091997385025,
0.019957222044467926,
-0.004060983192175627,
-0.05768861249089241,
0.03367731720209122,
-0.04808729887008667,
0.05166001617908478,
0.049996111541986465,
0.10059285908937454,
-0.07182478904724121,
0.03969135135412216,
-0.08233723044395447,
-0.001260188058950007,
-0.0706833004951477,
0.1087130606174469,
-0.02667609043419361,
-0.017993591725826263,
-0.05225837230682373,
-0.0754600241780281,
-0.02274518646299839,
0.013102047145366669,
0.06032036989927292,
0.14034093916416168,
-0.0943712592124939,
-0.03561214730143547,
0.18918262422084808,
-0.0605938620865345,
-0.10995184630155563,
0.11815884709358215,
-0.007282312493771315,
0.07448776066303253,
0.1021985337138176,
0.11009053885936737,
0.13852663338184357,
-0.06169707328081131,
0.03115653619170189,
0.11635006219148636,
-0.04845312237739563,
-0.24286291003227234,
0.04457497224211693,
0.09728868305683136,
-0.17733170092105865,
0.07782556116580963,
-0.07436326891183853,
0.11799080669879913,
-0.053280752152204514,
-0.07806013524532318,
-0.03560172766447067,
-0.11948273330926895,
0.07967609167098999,
0.04804383963346481,
0.10002214461565018,
-0.013725263997912407,
-0.12228406965732574,
-0.0035884249955415726,
0.15500888228416443,
-0.06994502991437912,
0.0035462509840726852,
-0.08569294214248657,
0.14146806299686432,
-0.1116664856672287,
0.03399020805954933,
-0.11325810104608536,
-0.04314736649394035,
-0.021252723410725594,
0.05204697698354721,
0.07753361761569977,
0.09820860624313354,
0.04533813148736954,
0.08565642684698105,
-0.024714209139347076,
0.01016104593873024,
-0.0031242265831679106,
-0.018233735114336014,
-0.09127549827098846,
-0.021775590255856514,
-0.031466949731111526,
-0.013305597938597202,
0.06641014665365219,
-0.15630966424942017,
0.047466643154621124,
-0.04503175616264343,
-0.004338799975812435,
-0.026700176298618317,
0.00020089429744984955,
0.047636862844228745,
0.010138226673007011,
-0.009973977692425251,
-0.025099044665694237,
0.11916903406381607,
0.03764447942376137,
-0.01800442859530449,
0.006825325544923544,
-0.06270244717597961,
0.0669761374592781,
0.14756347239017487,
-0.11415170878171921,
-0.01762314885854721,
0.00951786432415247,
-0.04881835728883743,
0.0237539354711771,
-0.03706110641360283,
0.06212415173649788,
0.1998620182275772,
-0.0052774385549128056,
0.12375693768262863,
-0.0487808883190155,
-0.0021158745512366295,
-0.012723464518785477,
-0.06496778875589371,
0.03635174036026001,
0.0596766397356987,
0.1884174644947052,
0.008743400685489178,
0.08743522316217422,
0.08024011552333832,
-0.0842951312661171,
0.11013363301753998,
0.06231587752699852,
-0.051529865711927414,
0.011230154894292355,
-0.049943216145038605,
0.011357498355209827,
0.06606026738882065,
-0.09214091300964355,
0.005123190116137266,
0.07008100301027298,
-0.0159013532102108,
0.10830650478601456,
-0.1602116972208023,
-0.032945796847343445,
-0.011578530073165894,
-0.052387990057468414,
-0.016273317858576775,
0.06022835522890091,
-0.06694409996271133,
0.08079862594604492,
-0.03177382051944733,
-0.031696222722530365,
0.0619080550968647,
0.038705650717020035,
-0.06440042704343796,
0.14651142060756683,
-0.035043541342020035,
-0.2634434700012207,
-0.10475820302963257,
0.005611277185380459,
-0.07226386666297913,
0.03576219081878662,
0.04969032108783722,
-0.03969322517514229,
-0.05891808494925499,
-0.04708588495850563,
-0.044096626341342926,
0.04924013465642929,
-0.028475113213062286,
0.012113010510802269,
0.008234096691012383,
-0.021893886849284172,
-0.12451165169477463,
-0.012090848758816719,
-0.013701423071324825,
-0.13901880383491516,
0.10642940551042557,
-0.05735774710774422,
0.01960168220102787,
0.17730483412742615,
0.0017416663467884064,
0.04266323149204254,
0.0016503530787304044,
0.20744886994361877,
-0.04385897517204285,
0.03687756881117821,
0.26436957716941833,
0.124518483877182,
0.03709236904978752,
-0.020764928311109543,
0.038113947957754135,
-0.10208964347839355,
0.009218691848218441,
-0.04586879909038544,
-0.09396052360534668,
-0.1461416631937027,
-0.12486899644136429,
-0.10373343527317047,
0.057340800762176514,
0.0738096609711647,
0.04654708877205849,
0.037428081035614014,
0.1356627494096756,
0.006454478949308395,
0.1082712858915329,
-0.024288581684231758,
0.06422152370214462,
0.18048083782196045,
-0.03310957923531532,
0.08733303844928741,
-0.08587902039289474,
-0.009286472573876381,
0.12031038850545883,
0.09457998722791672,
0.10325760394334793,
-0.07462480664253235,
0.06284424662590027,
0.013331815600395203,
0.15582995116710663,
0.08352982997894287,
0.09191524237394333,
-0.04706104099750519,
0.0037119602784514427,
-0.032731134444475174,
-0.020566822960972786,
-0.12835437059402466,
0.043747905641794205,
-0.0380248948931694,
-0.09853964298963547,
-0.06196127086877823,
-0.010778430849313736,
0.03607840836048126,
0.07158535718917847,
0.08273380249738693,
-0.3295469880104065,
-0.09623915702104568,
0.035425275564193726,
0.007690450642257929,
-0.06231210380792618,
0.059722766280174255,
-0.01935790851712227,
-0.11050619184970856,
-0.0006642368971370161,
-0.03577670454978943,
0.10975214838981628,
-0.1361650675535202,
-0.002804192015901208,
-0.021173246204853058,
0.10254037380218506,
0.029658833518624306,
0.14931410551071167,
-0.1824365109205246,
0.11460264027118683,
0.0036041690036654472,
0.07896405458450317,
-0.09836312383413315,
-0.0032612369395792484,
0.06722276657819748,
0.10400597006082535,
0.10752607136964798,
0.0008363124215975404,
-0.04264795407652855,
-0.09019546210765839,
-0.14114899933338165,
0.041412487626075745,
-0.09943840652704239,
-0.012201790697872639,
-0.007866619154810905,
-0.027203932404518127,
-0.009947793558239937,
-0.015632493421435356,
-0.033696725964546204,
-0.12579108774662018,
-0.12702900171279907,
0.051896873861551285,
0.1140633076429367,
0.011675410903990269,
-0.032752882689237595,
-0.05916736274957657,
-0.08734212815761566,
0.23814934492111206,
0.04880253225564957,
-0.12185986340045929,
-0.12021878361701965,
0.01621098630130291,
0.04373447969555855,
-0.0947447270154953,
0.03454570099711418,
0.006540076807141304,
0.07259297370910645,
-0.04083741828799248,
-0.13253752887248993,
0.06673749536275864,
-0.09520384669303894,
-0.057161420583724976,
0.029689587652683258,
0.04190589860081673,
0.012022999115288258,
0.03219500556588173,
0.046575479209423065,
-0.010769473388791084,
-0.07621534913778305,
-0.1432799994945526,
-0.03291937708854675,
0.11114124208688736,
0.007860318757593632,
-0.04846733435988426,
-0.02938014268875122,
0.014973466284573078,
0.02497761882841587,
0.039556220173835754,
0.20234976708889008,
0.17541086673736572,
-0.08263202011585236,
0.0889989510178566,
0.1359102427959442,
-0.026515867561101913,
-0.28306490182876587,
-0.04694884642958641,
-0.06136097013950348,
-0.005781711079180241,
-0.046619657427072525,
-0.2288627326488495,
0.0934719666838646,
0.048224225640296936,
-0.06547760963439941,
0.10257642716169357,
-0.23990550637245178,
-0.07700753211975098,
0.1589621752500534,
0.11031746864318848,
0.24723060429096222,
-0.07600162923336029,
0.003085080301389098,
-0.02568932995200157,
-0.127233624458313,
0.19382046163082123,
-0.10109293460845947,
0.13376249372959137,
-0.07633040100336075,
0.12302396446466446,
0.013002785854041576,
-0.05357692763209343,
0.07896195352077484,
-0.038458045572042465,
0.030923647806048393,
-0.060091160237789154,
0.11216577887535095,
0.07029561698436737,
-0.062228012830019,
0.14950047433376312,
-0.05452822521328926,
0.07025276869535446,
-0.13584740459918976,
-0.08696845918893814,
-0.05276259779930115,
0.0629177838563919,
0.034742824733257294,
-0.10448145121335983,
0.0026717297732830048,
0.019137833267450333,
-0.03127439692616463,
0.028675511479377747,
-0.007660801988095045,
0.011446324177086353,
-0.01084222923964262,
0.09245012700557709,
0.10773473978042603,
-0.09364914149045944,
-0.06943852454423904,
-0.02991439588367939,
-0.02516886405646801,
0.08439984917640686,
-0.21754370629787445,
0.00047814100980758667,
0.05907237529754639,
-0.009411710314452648,
0.019383203238248825,
0.05781154707074165,
-0.020994216203689575,
0.05225824564695358,
0.0703214704990387,
-0.16125571727752686,
-0.07885990291833878,
-0.07646758109331131,
-0.021094517782330513,
-0.001334700151346624,
0.08489080518484116,
0.0996757373213768,
-0.10799574106931686,
-0.048326004296541214,
-0.010422305203974247,
0.014934821985661983,
-0.06757020950317383,
0.06009616330265999,
0.06958010792732239,
-0.0027523362077772617,
-0.08512663841247559,
0.05451330915093422,
0.01442930568009615,
-0.007060351315885782,
0.020151939243078232,
0.10511389374732971,
-0.1030614823102951,
-0.16121548414230347,
-0.03949396312236786,
0.03517131507396698,
-0.11429828405380249,
-0.04913204535841942,
-0.045064717531204224,
-0.006218852940946817,
0.06305786222219467,
-0.1067899614572525,
0.06850646436214447,
-0.01360334549099207,
-0.05971220135688782,
-0.0073141553439199924,
-0.10747047513723373,
0.02406373620033264,
0.022490041330456734,
0.03220631554722786,
-0.0885990560054779,
0.076416976749897,
0.0033702407963573933,
0.10955648869276047,
-0.06711208075284958,
-0.03226231783628464,
-0.08456917852163315,
0.0367911234498024,
-0.11928259581327438,
-0.03043530322611332,
-0.12941467761993408,
0.035402338951826096,
-0.04785328358411789,
0.0009204682428389788,
-0.03774009644985199,
0.05603233724832535,
-0.0932004451751709,
0.034624893218278885,
-0.025165069848299026,
0.028517544269561768,
-0.08061188459396362,
0.032842475920915604,
0.024464894086122513,
-0.030140139162540436,
0.12036912888288498,
-0.0049890480004251,
-0.02765749953687191,
0.08100047707557678,
-0.11216859519481659,
-0.0028234259225428104,
0.005228849593549967,
0.052434056997299194,
0.039939019829034805,
-0.026363737881183624,
0.07733170688152313,
0.039438724517822266,
0.03708077222108841,
-0.00815215427428484,
0.07451821863651276,
-0.08964258432388306,
-0.001767061767168343,
-0.030730506405234337,
-0.05310023948550224,
-0.05257545784115791,
0.03236435353755951,
0.05345438793301582,
0.06419725716114044,
0.08219922333955765,
-0.0537729486823082,
0.03790136054158211,
-0.12135758250951767,
0.005656549707055092,
-0.011371894739568233,
-0.08408546447753906,
-0.025720151141285896,
-0.06862716376781464,
0.06334008276462555,
-0.0023056981153786182,
0.2127940058708191,
0.04638350009918213,
-0.07030774652957916,
-0.04656697064638138,
0.008985425345599651,
0.10690045356750488,
-0.03278343379497528,
0.2298673689365387,
0.06495997309684753,
0.03925361484289169,
-0.0011195170227438211,
0.13387320935726166,
0.062180884182453156,
0.07094427198171616,
0.09524301439523697,
-0.006532676983624697,
-0.025600876659154892,
0.09736257791519165,
-0.05699291080236435,
-0.02807079441845417,
-0.1625269055366516,
-0.009923204779624939,
-0.06274767220020294,
0.0623583048582077,
-0.09662892669439316,
0.02691202238202095,
0.1285991668701172,
-0.08202783763408661,
0.027350548654794693,
0.029376063495874405,
-0.08609414100646973,
-0.15076221525669098,
-0.27753904461860657,
-0.05781007185578346,
-0.15412524342536926,
0.016766445711255074,
-0.07819778472185135,
-0.057777490466833115,
0.05446816235780716,
0.019727328792214394,
-0.04564306139945984,
0.0743965357542038,
0.016497425734996796,
0.02670028991997242,
0.03844643756747246,
-0.04017492011189461,
-0.026033541187644005,
-0.054306309670209885,
-0.02356024831533432,
-0.06340824067592621,
0.002110981848090887,
0.08068876713514328,
0.004464333411306143,
-0.05473465099930763,
0.07692894339561462,
0.004431088920682669,
-0.035219572484493256,
-0.10054612159729004,
0.02858518436551094,
0.04592963308095932,
0.09826211631298065,
0.010282288305461407,
-0.062385305762290955,
0.037079550325870514,
0.18344497680664062,
-0.053834669291973114,
-0.08376596122980118,
-0.1081281304359436,
0.28618234395980835,
-0.036889076232910156,
-0.03356567397713661,
0.01007677149027586,
-0.009240871295332909,
0.025157073512673378,
0.31070613861083984,
0.2735140919685364,
-0.06977580487728119,
0.0006465587648563087,
-0.008178639225661755,
-0.007629820145666599,
-0.017422577366232872,
0.1846383959054947,
0.06624425202608109,
0.20494607090950012,
-0.09263760596513748,
-0.05262881517410278,
-0.018291151151061058,
-0.03108564205467701,
-0.08040082454681396,
0.06476535648107529,
-0.008501479402184486,
-0.036910220980644226,
-0.01795976050198078,
0.06638303399085999,
-0.172578364610672,
0.12451571226119995,
-0.025914601981639862,
-0.004606209229677916,
-0.10499841719865799,
0.013687501661479473,
-0.0495208278298378,
-0.011067601852118969,
0.07891017943620682,
-0.05388788878917694,
0.04560836777091026,
0.10878680646419525,
-0.011040924116969109,
-0.16465403139591217,
-0.004647702444344759,
0.07268957048654556,
0.11829061806201935,
0.22304295003414154,
-0.024752933531999588,
0.055490948259830475,
0.08988980203866959,
-0.024645227938890457,
-0.15072712302207947,
0.10115647315979004,
-0.027899742126464844,
-0.052308592945337296,
0.049980852752923965,
-0.02732306905090809,
-0.05427178367972374,
-0.0010845692595466971,
0.024331331253051758,
-0.05221278965473175,
-0.014635743573307991,
-0.04114032909274101,
0.01437658816576004,
-0.057837940752506256,
0.005222373176366091,
-0.0584414042532444,
0.11637807637453079,
0.05925540253520012,
-0.055957160890102386,
-0.0738348588347435,
-0.10106317698955536,
0.08252622187137604,
-0.017831608653068542,
-0.033266641199588776,
-0.03391774371266365,
-0.10095479339361191,
-0.05412621796131134,
0.041117675602436066,
0.02621982805430889,
-0.17450794577598572,
-0.02067631296813488,
-0.08635053038597107,
-0.038282573223114014,
-0.11547408252954483,
0.09313409775495529,
0.1267702281475067,
-0.008120681159198284,
-0.025235243141651154,
-0.00755832064896822,
-0.07361823320388794,
0.03994293138384819,
-0.12907317280769348,
-0.12392633408308029
] |
null | null |
transformers
|
# bert-base for QA
**Code:** See [Ainize Workspace](https://link.ainize.ai/3FjvBVn)
**klue-bert-base-mrc DEMO**: [Ainize DEMO](https://main-klue-mrc-bert-scy6500.endpoint.ainize.ai/)
**klue-bert-base-mrc API**: [Ainize API](https://ainize.ai/scy6500/KLUE-MRC-BERT?branch=main)
## Overview
**Language model:** klue/bert-base
**Language:** Korean
**Downstream-task:** Extractive QA
**Training data:** KLUE-MRC
**Eval data:** KLUE-MRC
## Usage
### In Transformers
```python
import torch
from transformers import AutoModelForQuestionAnswering, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("ainize/klue-bert-base-mrc")
model = AutoModelForQuestionAnswering.from_pretrained("ainize/klue-bert-base-mrc")

context = "your context"
question = "your question"

# Tokenize the (context, question) pair and turn each field into a batch of size 1.
encodings = tokenizer(context, question, max_length=512, truncation=True,
                      padding="max_length", return_token_type_ids=False)
encodings = {key: torch.tensor([val]) for key, val in encodings.items()}
input_ids = encodings["input_ids"]
attention_mask = encodings["attention_mask"]

# The model returns start/end logits over the input tokens.
pred = model(input_ids, attention_mask=attention_mask)
start_logits, end_logits = pred.start_logits, pred.end_logits

# Pick the most likely start and end positions and decode the answer span.
token_start_index, token_end_index = start_logits.argmax(dim=-1), end_logits.argmax(dim=-1)
pred_ids = input_ids[0][token_start_index: token_end_index + 1]
prediction = tokenizer.decode(pred_ids)
```
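For quick experiments, the same checkpoint can also be wrapped in the `question-answering` pipeline (a sketch added here, not part of the original card; the inputs are placeholders):
```python
from transformers import pipeline

qa = pipeline("question-answering", model="ainize/klue-bert-base-mrc",
              tokenizer="ainize/klue-bert-base-mrc")

# Replace the placeholders with a Korean context passage and a question about it.
result = qa(question="your question", context="your context")
print(result["answer"], result["score"])
```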
## About us
[Teachable NLP](https://ainize.ai/teachable-nlp) - Train NLP models with your own text without writing any code
[Ainize](https://ainize.ai/) - Deploy ML project using free gpu
|
{"language": "ko", "license": "cc-by-sa-4.0", "tags": ["bert", "mrc"], "datasets": ["klue"]}
|
question-answering
|
ainize/klue-bert-base-mrc
|
[
"transformers",
"pytorch",
"bert",
"question-answering",
"mrc",
"ko",
"dataset:klue",
"license:cc-by-sa-4.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ko"
] |
TAGS
#transformers #pytorch #bert #question-answering #mrc #ko #dataset-klue #license-cc-by-sa-4.0 #endpoints_compatible #region-us
|
# bert-base for QA
Code: See Ainize Workspace
klue-bert-base-mrc DEMO: Ainize DEMO
klue-bert-base-mrc API: Ainize API
## Overview
Language model: klue/bert-base
Language: Korean
Downstream-task: Extractive QA
Training data: KLUE-MRC
Eval data: KLUE-MRC
## Usage
### In Transformers
## About us
Teachable NLP - Train NLP models with your own text without writing any code
Ainize - Deploy ML project using free gpu
|
[
"# bert-base for QA \n\nCode: See Ainize Workspace \n\nklue-bert-base-mrc DEMO: Ainize DEMO\n\nklue-bert-base-mrc API: Ainize API",
"## Overview\nLanguage model: klue/bert-base \nLanguage: Korean \nDownstream-task: Extractive QA \nTraining data: KLUE-MRC \nEval data: KLUE-MRC",
"## Usage",
"### In Transformers",
"## About us\nTeachable NLP - Train NLP models with your own text without writing any code \nAinize - Deploy ML project using free gpu"
] |
[
"TAGS\n#transformers #pytorch #bert #question-answering #mrc #ko #dataset-klue #license-cc-by-sa-4.0 #endpoints_compatible #region-us \n",
"# bert-base for QA \n\nCode: See Ainize Workspace \n\nklue-bert-base-mrc DEMO: Ainize DEMO\n\nklue-bert-base-mrc API: Ainize API",
"## Overview\nLanguage model: klue/bert-base \nLanguage: Korean \nDownstream-task: Extractive QA \nTraining data: KLUE-MRC \nEval data: KLUE-MRC",
"## Usage",
"### In Transformers",
"## About us\nTeachable NLP - Train NLP models with your own text without writing any code \nAinize - Deploy ML project using free gpu"
] |
[
51,
45,
43,
3,
6,
33
] |
[
"passage: TAGS\n#transformers #pytorch #bert #question-answering #mrc #ko #dataset-klue #license-cc-by-sa-4.0 #endpoints_compatible #region-us \n# bert-base for QA \n\nCode: See Ainize Workspace \n\nklue-bert-base-mrc DEMO: Ainize DEMO\n\nklue-bert-base-mrc API: Ainize API## Overview\nLanguage model: klue/bert-base \nLanguage: Korean \nDownstream-task: Extractive QA \nTraining data: KLUE-MRC \nEval data: KLUE-MRC## Usage### In Transformers## About us\nTeachable NLP - Train NLP models with your own text without writing any code \nAinize - Deploy ML project using free gpu"
] |
[
-0.05682341009378433,
0.09018290042877197,
-0.0015071160160005093,
0.04425981640815735,
0.13661886751651764,
-0.018284032121300697,
0.10053344070911407,
0.061699800193309784,
0.09480877965688705,
-0.036102112382650375,
0.09060701727867126,
0.13890372216701508,
0.02201271802186966,
0.08503131568431854,
-0.004457250703126192,
-0.25053784251213074,
-0.019032511860132217,
0.019946128129959106,
-0.030299831181764603,
0.08838136494159698,
0.08929785341024399,
-0.07461812347173691,
0.11085399985313416,
0.02042499929666519,
-0.05727049335837364,
0.016548054292798042,
-0.07943136245012283,
-0.11322256922721863,
0.10143103450536728,
0.02004663646221161,
0.055422212928533554,
0.043626587837934494,
0.002595199504867196,
-0.15593591332435608,
0.021554360166192055,
-0.03716764971613884,
-0.03486501798033714,
0.04339068382978439,
-0.031118985265493393,
-0.02396349422633648,
0.10371211171150208,
-0.0775090903043747,
-0.005319173447787762,
0.020396055653691292,
-0.062162384390830994,
-0.11815690249204636,
-0.08216220885515213,
0.07363288849592209,
0.13684311509132385,
0.0933721587061882,
0.004446493927389383,
0.12602972984313965,
-0.14012563228607178,
0.0882822573184967,
0.09525631368160248,
-0.3355676531791687,
-0.017782006412744522,
0.07507088780403137,
0.017274074256420135,
-0.043392427265644073,
-0.07194945216178894,
0.030237292870879173,
0.044369298964738846,
0.012303207069635391,
0.044142946600914,
-0.13423356413841248,
-0.11882925033569336,
0.07651445269584656,
-0.11177206039428711,
0.032477006316185,
0.27327167987823486,
-0.02993796579539776,
-0.004393251147121191,
-0.04552838206291199,
0.0384257510304451,
0.004284776281565428,
0.00004396288932184689,
0.06654326617717743,
-0.030352065339684486,
-0.05292687937617302,
-0.12157570570707321,
-0.097113698720932,
-0.06490351259708405,
-0.08150793612003326,
-0.07716931402683258,
0.2169354110956192,
0.06000407040119171,
-0.010042659938335419,
-0.07349824905395508,
0.1213662251830101,
0.008737177588045597,
-0.11973994970321655,
-0.09086022526025772,
-0.06421471387147903,
0.01797136291861534,
0.006862792186439037,
-0.003720829263329506,
0.017467055469751358,
0.09158531576395035,
0.08939378708600998,
0.008592558093369007,
0.02009638026356697,
0.029604606330394745,
0.0519113652408123,
0.03486044704914093,
0.18038813769817352,
-0.0642031878232956,
0.013006303459405899,
0.06441455334424973,
0.002258219523355365,
-0.0008674139389768243,
-0.037412796169519424,
-0.1125633716583252,
-0.04491816833615303,
-0.028533605858683586,
0.15162339806556702,
0.00048449714086018503,
0.11691298335790634,
-0.011541526764631271,
-0.01958545111119747,
0.06169503927230835,
-0.08584664762020111,
-0.027681974694132805,
0.020340289920568466,
-0.035579223185777664,
0.01928393729031086,
0.013755029998719692,
0.03797033801674843,
-0.01643436960875988,
0.0067932517267763615,
-0.056817036122083664,
-0.02323688194155693,
-0.07948306202888489,
-0.04856010526418686,
0.04105674847960472,
-0.013123218901455402,
0.08516315370798111,
-0.17216132581233978,
-0.12337260693311691,
0.04172700643539429,
0.010510403662919998,
0.013433197513222694,
-0.04766010865569115,
-0.07259229570627213,
-0.10395698994398117,
-0.0257097240537405,
-0.03993228077888489,
0.013349291868507862,
-0.03584238886833191,
0.04198175296187401,
0.014355693012475967,
0.04717544838786125,
-0.07380609959363937,
0.06411027908325195,
-0.11442217230796814,
0.03065805695950985,
-0.13263969123363495,
0.051715392619371414,
-0.14464540779590607,
-0.0038429072592407465,
-0.07208949327468872,
-0.04650253802537918,
0.07629019021987915,
0.030445802956819534,
0.019428536295890808,
0.1358768194913864,
-0.08304635435342789,
-0.05845694616436958,
0.0819164291024208,
-0.029467083513736725,
-0.15604078769683838,
0.04702407494187355,
-0.007802527863532305,
0.030898921191692352,
0.04520176723599434,
0.12033793330192566,
0.12458568066358566,
-0.14544808864593506,
-0.07349153608083725,
0.1322823166847229,
-0.009408506564795971,
-0.11891882866621017,
0.11168672889471054,
0.03573722019791603,
-0.14273981750011444,
0.06150209531188011,
-0.16673435270786285,
0.09418308734893799,
-0.03601989895105362,
-0.07282644510269165,
0.008236887864768505,
-0.0902688205242157,
0.1058306023478508,
-0.02874947525560856,
0.06076440587639809,
0.01983499713242054,
-0.03817726671695709,
0.09948793053627014,
0.10666842013597488,
-0.062095992267131805,
-0.008673311211168766,
-0.12420844286680222,
0.029980912804603577,
-0.03980681300163269,
0.022082552313804626,
-0.07909971475601196,
-0.13330750167369843,
-0.006693575065582991,
-0.08234795928001404,
0.0783170834183693,
0.05183028802275658,
0.002688101027160883,
0.052643075585365295,
-0.05344827473163605,
-0.027111634612083435,
-0.09418360143899918,
0.032598547637462616,
-0.05077522248029709,
-0.13392020761966705,
0.007966699078679085,
-0.03318806365132332,
0.013678188435733318,
0.0025879284366965294,
0.02784053608775139,
0.11633697897195816,
0.049407199025154114,
0.0034492488484829664,
0.08068995922803879,
0.060657601803541183,
0.13247795403003693,
0.017177268862724304,
0.040765609592199326,
0.04984399303793907,
-0.0036844853311777115,
-0.12109558284282684,
0.15447430312633514,
-0.04282787814736366,
-0.02664387784898281,
0.142625629901886,
0.0030564547050744295,
0.026433097198605537,
-0.02552114799618721,
-0.023514389991760254,
-0.026854149997234344,
-0.008881048299372196,
-0.018285676836967468,
0.26958614587783813,
0.036785367876291275,
0.10357040911912918,
-0.060990430414676666,
-0.10330216586589813,
-0.03548727557063103,
-0.056606777012348175,
-0.037615835666656494,
0.16918164491653442,
0.1529162973165512,
-0.04370523989200592,
0.11825613677501678,
0.20158329606056213,
-0.10495625436306,
0.23711341619491577,
-0.04735632613301277,
-0.01005637738853693,
-0.05109848827123642,
-0.01943645440042019,
-0.038763340562582016,
0.15588606894016266,
-0.1383579522371292,
-0.02530628815293312,
0.048718441277742386,
-0.03135869652032852,
0.07102032750844955,
-0.11956820636987686,
-0.07538614422082901,
-0.06652908772230148,
-0.014851338230073452,
-0.09148894250392914,
0.13157938420772552,
0.017755378037691116,
0.03897687792778015,
0.02125268615782261,
0.00022147843264974654,
0.07734636962413788,
-0.028719736263155937,
-0.04247697442770004,
0.19344773888587952,
-0.11276087164878845,
-0.16612230241298676,
-0.10111255198717117,
0.015815403312444687,
-0.12418559193611145,
-0.0948711410164833,
0.05812088027596474,
-0.13496644794940948,
-0.025616958737373352,
-0.02401880733668804,
0.1058361753821373,
-0.01958456262946129,
-0.05138245224952698,
-0.016432877629995346,
0.013130570761859417,
-0.09510423988103867,
-0.12812373042106628,
-0.029752159491181374,
0.0057965656742453575,
-0.08286294341087341,
0.0660460889339447,
0.010819078423082829,
0.12233810871839523,
0.1018785685300827,
0.00978055875748396,
0.009862020611763,
-0.023457692936062813,
0.15608012676239014,
-0.054781224578619,
-0.013953729532659054,
0.16699911653995514,
-0.004638696555048227,
0.034200236201286316,
0.09360218793153763,
0.0461096316576004,
-0.009581062011420727,
0.049109041690826416,
0.029497036710381508,
-0.07379201799631119,
-0.23063324391841888,
-0.08161086589097977,
-0.08954992890357971,
0.12220156937837601,
-0.05265278369188309,
0.016418924555182457,
-0.03541693091392517,
0.08103379607200623,
0.018396573141217232,
0.04288458079099655,
-0.06198533624410629,
0.06259167194366455,
0.18801945447921753,
0.006636707577854395,
0.03608882799744606,
-0.0746971070766449,
-0.05872277542948723,
0.03697344660758972,
0.16109929978847504,
0.1497257500886917,
-0.025707963854074478,
-0.0012326027499511838,
0.11332865059375763,
0.16210392117500305,
0.08046256750822067,
0.053707368671894073,
-0.05857694149017334,
-0.007452202960848808,
0.014097546227276325,
-0.06454913318157196,
-0.006276280619204044,
0.052557311952114105,
0.006203150376677513,
-0.07078976929187775,
-0.045373789966106415,
0.013536635786294937,
0.0586373470723629,
0.17740045487880707,
0.017045361921191216,
-0.10014346987009048,
-0.0589911974966526,
0.05904783681035042,
0.02479086071252823,
0.012143942527472973,
0.09083286672830582,
0.07572091370820999,
-0.16395646333694458,
0.03306056559085846,
-0.04296855628490448,
0.0814613476395607,
-0.09447338432073593,
0.006859797053039074,
0.021331200376152992,
0.08421274274587631,
0.026109972968697548,
0.11230365931987762,
-0.26209649443626404,
0.19083690643310547,
0.014328846707940102,
0.08447540551424026,
-0.08713041245937347,
-0.026247216388583183,
0.04234239086508751,
0.05174963176250458,
0.11378511786460876,
0.004648024216294289,
-0.02867385745048523,
-0.1767968088388443,
-0.06153522804379463,
0.10407298058271408,
0.0214335136115551,
-0.0892137959599495,
0.05633554980158806,
0.035021714866161346,
-0.013670862652361393,
-0.017260052263736725,
-0.05175144970417023,
-0.16644974052906036,
-0.07858218997716904,
0.030882881954312325,
0.05810689181089401,
0.08841121941804886,
-0.04663947969675064,
-0.05861317366361618,
-0.0838894173502922,
0.04076312482357025,
-0.12428572028875351,
-0.11299476027488708,
-0.054098572582006454,
-0.11451660841703415,
0.0826425850391388,
-0.15561816096305847,
-0.013683966360986233,
-0.032154861837625504,
-0.010199198499321938,
-0.012955205515027046,
-0.13722684979438782,
0.06389656662940979,
-0.08728422224521637,
-0.10993039608001709,
0.06629010289907455,
0.08202439546585083,
0.042163558304309845,
0.026301773265004158,
0.0009286986896768212,
-0.024993903934955597,
-0.036971211433410645,
-0.18971170485019684,
0.008474756963551044,
0.06297584623098373,
0.03616039454936981,
0.03870469704270363,
-0.13073590397834778,
-0.006351381540298462,
-0.06273607909679413,
-0.04745880141854286,
0.20730896294116974,
0.2313329577445984,
-0.05852695554494858,
0.059124115854501724,
0.17101910710334778,
-0.032566484063863754,
-0.3117680847644806,
-0.05425324663519859,
-0.06403347104787827,
0.02442353218793869,
0.007723877672106028,
-0.24009306728839874,
0.08565346896648407,
0.027683494612574577,
-0.03220108523964882,
-0.01573648490011692,
-0.2763189673423767,
-0.11655160784721375,
0.16491945087909698,
0.07915997505187988,
0.09329766035079956,
-0.06926201283931732,
-0.04304442182183266,
0.01983899436891079,
-0.1418047696352005,
0.15660518407821655,
-0.18548712134361267,
0.11944343149662018,
-0.052782557904720306,
0.15633156895637512,
0.0004279591084923595,
-0.03155073896050453,
0.09731479734182358,
0.035721056163311005,
0.03431234508752823,
-0.014227809384465218,
-0.12818565964698792,
0.05179489403963089,
-0.015731440857052803,
0.11897076666355133,
-0.12421926856040955,
0.06407295167446136,
-0.1479838341474533,
-0.020854143425822258,
-0.08766752481460571,
0.12797844409942627,
-0.02267586439847946,
-0.11034274101257324,
-0.04179646074771881,
0.07320807129144669,
-0.03389839082956314,
0.000483083538711071,
0.11932450532913208,
-0.017222996801137924,
-0.01395404152572155,
0.06586334854364395,
0.19686271250247955,
0.09026060253381729,
0.05646499991416931,
-0.0117013119161129,
-0.05679275467991829,
0.11507607251405716,
-0.12725704908370972,
0.003959984052926302,
0.11153356730937958,
0.04148557037115097,
0.02479402720928192,
0.04670708253979683,
-0.08979959785938263,
0.1068345233798027,
0.03719475865364075,
-0.11367311328649521,
-0.11450435221195221,
-0.10203267633914948,
0.005061468109488487,
0.008641860447824001,
0.12390332669019699,
0.14819997549057007,
-0.08235635608434677,
-0.0334111787378788,
-0.015490776859223843,
-0.007859956473112106,
-0.05670589953660965,
0.01776365377008915,
0.13652320206165314,
0.03644191101193428,
-0.08630413562059402,
0.07904017716646194,
-0.034393440932035446,
-0.032288260757923126,
0.049390073865652084,
0.061640121042728424,
-0.09966256469488144,
-0.13506436347961426,
-0.07547269761562347,
0.17287103831768036,
-0.13273395597934723,
-0.0631304606795311,
-0.03937431424856186,
-0.11089546978473663,
0.013445270247757435,
0.11797288805246353,
0.038303595036268234,
0.02233247272670269,
-0.024884670972824097,
0.015399453230202198,
-0.05284729227423668,
0.1239524856209755,
-0.045050427317619324,
-0.0033504050225019455,
-0.10547615587711334,
0.012336327694356441,
-0.00643630838021636,
0.2116387039422989,
-0.06024632602930069,
-0.01614878885447979,
-0.16938155889511108,
0.0119057921692729,
-0.14411261677742004,
-0.0654950961470604,
-0.10391182452440262,
0.008599415421485901,
-0.05652749538421631,
-0.07328780740499496,
-0.06335440278053284,
0.0668957456946373,
-0.09764059633016586,
0.030058667063713074,
-0.017040714621543884,
0.10791120678186417,
-0.09673494845628738,
-0.017911331728100777,
0.10052891820669174,
-0.0354294553399086,
0.11463431268930435,
0.033199492841959,
-0.0158583614975214,
0.14136692881584167,
-0.08837224543094635,
0.04154989495873451,
-0.022865159437060356,
0.04977016523480415,
0.0687166079878807,
-0.025945885106921196,
-0.027760261669754982,
-0.018489830195903778,
0.07069947570562363,
0.03667299821972847,
0.12149246782064438,
-0.11941936612129211,
-0.048644620925188065,
0.042473312467336655,
-0.07032088190317154,
-0.08471807837486267,
0.06362289935350418,
0.11081298440694809,
0.07533589750528336,
0.10347860306501389,
-0.06393629312515259,
0.07764612138271332,
-0.007854139432311058,
0.019614476710557938,
0.03046519309282303,
-0.06516160815954208,
-0.03022269904613495,
-0.07316798716783524,
0.04049105942249298,
-0.06738726049661636,
0.170815572142601,
-0.02307562343776226,
-0.07466619461774826,
0.01527441293001175,
-0.027750739827752113,
0.0025331934448331594,
0.03496038541197777,
0.21096722781658173,
0.019582035019993782,
0.03248176723718643,
-0.05134090036153793,
0.054674189537763596,
-0.05145435407757759,
-0.017299985513091087,
0.1516348421573639,
0.08212477713823318,
0.10045530647039413,
0.047174595296382904,
0.13787627220153809,
-0.020016975700855255,
-0.10146196186542511,
-0.06914858520030975,
-0.01043050829321146,
0.05591132491827011,
-0.0936841070652008,
0.13555027544498444,
0.10421920567750931,
-0.11817178130149841,
0.043539587408304214,
-0.004171148873865604,
-0.08341266214847565,
-0.11332780867815018,
-0.10468707233667374,
-0.05978970229625702,
-0.11021702736616135,
0.05279101803898811,
-0.12135787308216095,
-0.05502496659755707,
0.019255634397268295,
0.008299737237393856,
-0.02178034372627735,
0.19554007053375244,
-0.03098924830555916,
-0.05134693533182144,
0.10058551281690598,
-0.02913912571966648,
0.0018792999908328056,
-0.057342443615198135,
0.05708575248718262,
-0.03255287557840347,
-0.03634169325232506,
-0.030232079327106476,
0.050366733223199844,
-0.1266106367111206,
0.024926021695137024,
-0.06509362906217575,
-0.05784884840250015,
-0.05564307048916817,
0.04728350415825844,
0.07424087077379227,
0.07037194073200226,
0.04531276598572731,
-0.02931050769984722,
0.026404457166790962,
0.20019739866256714,
-0.039402980357408524,
-0.08018496632575989,
-0.10659220814704895,
0.12612737715244293,
-0.009040486067533493,
0.01611989736557007,
0.012551127932965755,
-0.05140546336770058,
-0.04466612637042999,
0.2283562272787094,
0.18425141274929047,
-0.05690911412239075,
0.05398768186569214,
-0.041723910719156265,
0.035346802324056625,
-0.04950786381959915,
0.0987311601638794,
0.10356798022985458,
0.19896864891052246,
-0.09376591444015503,
-0.017664507031440735,
-0.05689751356840134,
-0.006306268274784088,
-0.04570479691028595,
0.030025705695152283,
0.02105189673602581,
-0.12392626702785492,
-0.08058370649814606,
0.08917709439992905,
-0.21495480835437775,
-0.008693747222423553,
-0.042998798191547394,
-0.08154670894145966,
-0.10617634654045105,
-0.029814349487423897,
0.05202677845954895,
0.04122520610690117,
0.06479194760322571,
-0.03351464122533798,
0.02291944995522499,
0.09662427008152008,
0.04089191555976868,
-0.07873453199863434,
0.007628550287336111,
0.11669843643903732,
0.11536043137311935,
0.09742214530706406,
-0.01659633032977581,
0.13470318913459778,
0.12421343475580215,
-0.005644237156957388,
-0.058241914957761765,
0.09750714898109436,
0.0844627320766449,
-0.129853293299675,
0.011334093287587166,
0.016141489148139954,
-0.006323427427560091,
0.12249771505594254,
0.03436930850148201,
0.03490899130702019,
0.05491458252072334,
0.0526556521654129,
-0.0010430114343762398,
-0.11535222083330154,
0.07311467081308365,
-0.11714393645524979,
0.11588430404663086,
0.12926577031612396,
-0.06844440847635269,
-0.05232489854097366,
-0.016273677349090576,
0.060866374522447586,
-0.016672492027282715,
-0.07070571184158325,
-0.04135846719145775,
-0.053152330219745636,
-0.026160433888435364,
0.04533602297306061,
0.03953755646944046,
-0.1660807877779007,
0.0030037297401577234,
-0.08327842503786087,
0.03157642111182213,
-0.0693194717168808,
0.03694324567914009,
0.10103598982095718,
-0.021913578733801842,
0.0033959425054490566,
-0.15057773888111115,
-0.04667704552412033,
0.015438329428434372,
-0.18102166056632996,
-0.10075300931930542
] |
null | null |
transformers
|
# bert-base for KLUE Relation Extraction task.
Fine-tuned klue/bert-base on the KLUE RE dataset.
- <a href="https://klue-benchmark.com/">KLUE Benchmark Official Webpage</a>
- <a href="https://github.com/KLUE-benchmark/KLUE">KLUE Official Github</a>
- <a href="https://github.com/ainize-team/klue-re-workspace">KLUE RE Github</a>
- Run KLUE RE on a free GPU: <a href="https://ainize.ai/workspace/create?imageId=hnj95592adzr02xPTqss&git=https://github.com/ainize-team/klue-re-workspace">Ainize Workspace</a>
<br>
# Usage
<pre><code>
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
tokenizer = AutoTokenizer.from_pretrained("ainize/klue-bert-base-re")
model = AutoModelForSequenceClassification.from_pretrained("ainize/klue-bert-base-re")
# Add "<subj>", "</subj>" to both ends of the subject object and "<obj>", "</obj>" to both ends of the object object.
sentence = "<subj>손흥민</subj>은 <obj>대한민국</obj>에서 태어났다."
encodings = tokenizer(sentence,
max_length=128,
truncation=True,
padding="max_length",
return_tensors="pt")
outputs = model(**encodings)
logits = outputs['logits']
preds = torch.argmax(logits, dim=1)
</code></pre>
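The predicted index can be mapped back to a relation name through the classifier config. The lines below are a minimal sketch that assumes this checkpoint ships a populated <code>id2label</code> mapping; the exact label strings depend on how the model was exported.
<pre><code>
# Map the predicted class index back to a relation label.
# Assumes model.config.id2label is populated for this checkpoint.
pred_id = preds[0].item()
print(pred_id, model.config.id2label[pred_id])
</code></pre>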
<br>
# About us
- <a href="https://ainize.ai/teachable-nlp">Teachable NLP</a> - Train NLP models with your own text without writing any code
- <a href="https://ainize.ai/">Ainize</a> - Deploy ML projects using free GPUs
|
{}
|
text-classification
|
ainize/klue-bert-base-re
|
[
"transformers",
"pytorch",
"bert",
"text-classification",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us
|
# bert-base for KLUE Relation Extraction task.
Fine-tuned klue/bert-base using KLUE RE dataset.
- <a href="URL Benchmark Official Webpage</a>
- <a href="URL Official Github</a>
- <a href="URL RE Github</a>
- Run KLUE RE on free GPU : <a href="URL/URL Workspace</a>
<br>
# Usage
<pre><code>
from transformers import AutoTokenizer, AutoModelForSequenceClassification
tokenizer = AutoTokenizer.from_pretrained("ainize/klue-bert-base-re")
model = AutoModelForSequenceClassification.from_pretrained("ainize/klue-bert-base-re")
# Add "<subj>", "</subj>" to both ends of the subject object and "<obj>", "</obj>" to both ends of the object object.
sentence = "<subj>손흥민</subj>은 <obj>대한민국</obj>에서 태어났다."
encodings = tokenizer(sentence,
max_length=128,
truncation=True,
padding="max_length",
return_tensors="pt")
outputs = model(encodings)
logits = outputs['logits']
preds = URL(logits, dim=1)
</code></pre>
<br>
# About us
- <a href="URL NLP</a> - Train NLP models with your own text without writing any code
- <a href="URL - Deploy ML project using free gpu
|
[
"# bert-base for KLUE Relation Extraction task.\nFine-tuned klue/bert-base using KLUE RE dataset.\n- <a href=\"URL Benchmark Official Webpage</a>\n- <a href=\"URL Official Github</a> \n- <a href=\"URL RE Github</a>\n- Run KLUE RE on free GPU : <a href=\"URL/URL Workspace</a>\n\n<br>",
"# Usage\n<pre><code>\nfrom transformers import AutoTokenizer, AutoModelForSequenceClassification\n\ntokenizer = AutoTokenizer.from_pretrained(\"ainize/klue-bert-base-re\")\nmodel = AutoModelForSequenceClassification.from_pretrained(\"ainize/klue-bert-base-re\")",
"# Add \"<subj>\", \"</subj>\" to both ends of the subject object and \"<obj>\", \"</obj>\" to both ends of the object object.\nsentence = \"<subj>손흥민</subj>은 <obj>대한민국</obj>에서 태어났다.\"\n\nencodings = tokenizer(sentence, \n max_length=128, \n truncation=True, \n padding=\"max_length\", \n return_tensors=\"pt\")\n\noutputs = model(encodings)\n\nlogits = outputs['logits']\n\npreds = URL(logits, dim=1)\n</code></pre>\n\n<br>",
"# About us\n- <a href=\"URL NLP</a> - Train NLP models with your own text without writing any code\n- <a href=\"URL - Deploy ML project using free gpu"
] |
[
"TAGS\n#transformers #pytorch #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n",
"# bert-base for KLUE Relation Extraction task.\nFine-tuned klue/bert-base using KLUE RE dataset.\n- <a href=\"URL Benchmark Official Webpage</a>\n- <a href=\"URL Official Github</a> \n- <a href=\"URL RE Github</a>\n- Run KLUE RE on free GPU : <a href=\"URL/URL Workspace</a>\n\n<br>",
"# Usage\n<pre><code>\nfrom transformers import AutoTokenizer, AutoModelForSequenceClassification\n\ntokenizer = AutoTokenizer.from_pretrained(\"ainize/klue-bert-base-re\")\nmodel = AutoModelForSequenceClassification.from_pretrained(\"ainize/klue-bert-base-re\")",
"# Add \"<subj>\", \"</subj>\" to both ends of the subject object and \"<obj>\", \"</obj>\" to both ends of the object object.\nsentence = \"<subj>손흥민</subj>은 <obj>대한민국</obj>에서 태어났다.\"\n\nencodings = tokenizer(sentence, \n max_length=128, \n truncation=True, \n padding=\"max_length\", \n return_tensors=\"pt\")\n\noutputs = model(encodings)\n\nlogits = outputs['logits']\n\npreds = URL(logits, dim=1)\n</code></pre>\n\n<br>",
"# About us\n- <a href=\"URL NLP</a> - Train NLP models with your own text without writing any code\n- <a href=\"URL - Deploy ML project using free gpu"
] |
[
36,
99,
83,
178,
46
] |
[
"passage: TAGS\n#transformers #pytorch #bert #text-classification #autotrain_compatible #endpoints_compatible #region-us \n# bert-base for KLUE Relation Extraction task.\nFine-tuned klue/bert-base using KLUE RE dataset.\n- <a href=\"URL Benchmark Official Webpage</a>\n- <a href=\"URL Official Github</a> \n- <a href=\"URL RE Github</a>\n- Run KLUE RE on free GPU : <a href=\"URL/URL Workspace</a>\n\n<br># Usage\n<pre><code>\nfrom transformers import AutoTokenizer, AutoModelForSequenceClassification\n\ntokenizer = AutoTokenizer.from_pretrained(\"ainize/klue-bert-base-re\")\nmodel = AutoModelForSequenceClassification.from_pretrained(\"ainize/klue-bert-base-re\")# Add \"<subj>\", \"</subj>\" to both ends of the subject object and \"<obj>\", \"</obj>\" to both ends of the object object.\nsentence = \"<subj>손흥민</subj>은 <obj>대한민국</obj>에서 태어났다.\"\n\nencodings = tokenizer(sentence, \n max_length=128, \n truncation=True, \n padding=\"max_length\", \n return_tensors=\"pt\")\n\noutputs = model(encodings)\n\nlogits = outputs['logits']\n\npreds = URL(logits, dim=1)\n</code></pre>\n\n<br># About us\n- <a href=\"URL NLP</a> - Train NLP models with your own text without writing any code\n- <a href=\"URL - Deploy ML project using free gpu"
] |
[
0.01151170115917921,
0.1444348394870758,
-0.008801939897239208,
0.06509308516979218,
0.11251524835824966,
0.05967195704579353,
0.0698208212852478,
0.1061442419886589,
-0.001181714702397585,
0.03522360324859619,
0.013707094825804234,
0.09168791025876999,
0.04179416224360466,
0.07092704623937607,
0.00982515886425972,
-0.24315345287322998,
-0.014149193651974201,
-0.01978478580713272,
0.01326570101082325,
0.043054111301898956,
0.06462234258651733,
-0.07196184992790222,
0.10194762051105499,
0.027773499488830566,
-0.04515158757567406,
0.02979198470711708,
-0.013720708899199963,
-0.06497245281934738,
0.02551065944135189,
0.06268433481454849,
0.05817265436053276,
0.003813318908214569,
0.040802326053380966,
-0.20035943388938904,
0.0012989923125132918,
0.08773622661828995,
0.01290927268564701,
0.05326247215270996,
0.09919795393943787,
-0.10253028571605682,
0.04410557076334953,
-0.17067380249500275,
0.028288042172789574,
0.030144451186060905,
-0.09128855913877487,
-0.11905305832624435,
-0.0702272355556488,
0.066043920814991,
0.10955318063497543,
0.02965528331696987,
0.00029613886727020144,
0.032014451920986176,
-0.055360451340675354,
0.06251507997512817,
0.1932803839445114,
-0.24037228524684906,
0.0077763949520885944,
-0.02647044137120247,
-0.08242000639438629,
-0.049673568457365036,
-0.07695730775594711,
-0.005807887297123671,
-0.04071228951215744,
-0.0407722182571888,
0.04727242887020111,
-0.12168171256780624,
-0.1746331751346588,
0.010480056516826153,
-0.11079959571361542,
0.01763061061501503,
0.15907910466194153,
0.0012488554930314422,
-0.00958979781717062,
-0.02924727462232113,
-0.02277536503970623,
0.015187041833996773,
-0.028360990807414055,
0.02854662388563156,
0.015824422240257263,
-0.052835457026958466,
-0.03894835710525513,
-0.13520574569702148,
-0.07085594534873962,
-0.03329233080148697,
-0.029721075668931007,
0.11393163353204727,
0.06656595319509506,
-0.010165353305637836,
0.0037183857057243586,
0.1447596549987793,
-0.04459841176867485,
-0.14940278232097626,
-0.017497196793556213,
-0.025210116058588028,
-0.0953712984919548,
-0.034102585166692734,
-0.0567474290728569,
-0.14952710270881653,
0.056348543614149094,
0.1537652313709259,
0.027312368154525757,
0.04408998042345047,
-0.048183560371398926,
0.026205426082015038,
0.02915031835436821,
0.11314299702644348,
-0.07386564463376999,
-0.0693192407488823,
0.05065618082880974,
-0.03462647646665573,
-0.0013426828663796186,
-0.0009293988114222884,
-0.043495114892721176,
-0.09202941507101059,
-0.03190059959888458,
0.03944851830601692,
0.020550549030303955,
0.10638923943042755,
-0.05688580125570297,
-0.06023566797375679,
0.019156066700816154,
-0.1499793380498886,
0.0211125910282135,
0.034809406846761703,
-0.06871254742145538,
0.019481206312775612,
0.0972372516989708,
0.041854072362184525,
-0.05895353853702545,
0.08601310849189758,
-0.04944745823740959,
0.023199962452054024,
-0.1091594472527504,
-0.09025444090366364,
-0.014667530544102192,
-0.027303531765937805,
0.0010272974614053965,
-0.06198505312204361,
-0.15183025598526,
-0.016897238790988922,
0.10282439738512039,
-0.02853136882185936,
-0.0044053238816559315,
-0.07899023592472076,
0.021902574226260185,
0.0058349017053842545,
-0.012253004126250744,
-0.0005841052043251693,
-0.0016326622571796179,
0.010595690459012985,
-0.006486821919679642,
0.038905177265405655,
-0.00012389212497510016,
0.03884612023830414,
-0.10603039711713791,
0.04888386279344559,
-0.18361327052116394,
0.16988211870193481,
-0.02816653996706009,
0.0005600040894933045,
-0.1138477772474289,
-0.025291990488767624,
0.019620059058070183,
-0.028908783569931984,
0.07301069051027298,
0.03172938525676727,
-0.04066606983542442,
-0.027600467205047607,
0.13197125494480133,
-0.03532738611102104,
-0.06602743268013,
0.0383654460310936,
-0.03944937512278557,
0.037150170654058456,
0.067564457654953,
0.10956554859876633,
0.27077966928482056,
-0.03807637840509415,
-0.001396620529703796,
0.07037409394979477,
-0.01710367761552334,
0.04801168292760849,
0.03401647135615349,
-0.011237306520342827,
0.012666220776736736,
0.04751666635274887,
-0.10708075016736984,
0.07613435387611389,
0.03792273625731468,
-0.050771474838256836,
0.022924697026610374,
-0.053973857313394547,
0.08097296953201294,
-0.05204019695520401,
0.019559569656848907,
0.0487835556268692,
-0.08625735342502594,
0.11175354570150375,
0.0750245749950409,
-0.09156098961830139,
0.05823677033185959,
-0.024411723017692566,
-0.05143394693732262,
-0.009187296032905579,
0.0033520995639264584,
-0.12711285054683685,
-0.14983811974525452,
0.006055023521184921,
-0.12889231741428375,
0.13412782549858093,
0.07042811065912247,
0.014097391627728939,
0.042660195380449295,
-0.0036782296374440193,
-0.018074246123433113,
0.031173773109912872,
-0.022845229133963585,
-0.010050613433122635,
-0.12863247096538544,
-0.0069198207929730415,
0.023038817569613457,
0.04156779870390892,
0.007806558161973953,
0.02542942203581333,
0.032312359660863876,
0.09527561068534851,
0.021513737738132477,
0.035761699080467224,
0.01641050912439823,
-0.020734108984470367,
0.028118282556533813,
-0.004335303790867329,
-0.0022348440252244473,
-0.0015141665935516357,
-0.053414829075336456,
0.07042080909013748,
-0.05589144676923752,
-0.1525093913078308,
0.060871586203575134,
-0.0035962117835879326,
-0.05170111358165741,
-0.03870069235563278,
-0.008574473671615124,
-0.021882163360714912,
0.022215640172362328,
-0.007977737113833427,
0.260439932346344,
0.0981525182723999,
0.09802044183015823,
-0.030927734449505806,
-0.09947791695594788,
-0.04512016102671623,
-0.07649995386600494,
0.004372090566903353,
0.020561812445521355,
0.051621921360492706,
-0.12391772121191025,
0.043218404054641724,
0.03290465474128723,
-0.128032386302948,
0.12330842763185501,
0.030510948970913887,
-0.036557331681251526,
-0.03265422210097313,
0.011701571755111217,
-0.024563312530517578,
-0.017835762351751328,
-0.0176075492054224,
0.019808432087302208,
0.06319720298051834,
-0.013703037984669209,
0.04718915745615959,
-0.07805900275707245,
0.06498118489980698,
-0.002678090473636985,
-0.03136862441897392,
0.04175804555416107,
0.045513954013586044,
0.04561777412891388,
0.03135839104652405,
0.04970941320061684,
0.07022595405578613,
0.032131243497133255,
-0.03106149658560753,
-0.056503526866436005,
0.10284533351659775,
-0.13412593305110931,
-0.19845625758171082,
-0.15648846328258514,
-0.1268836110830307,
-0.15313108265399933,
-0.013546650297939777,
0.0346849225461483,
-0.0351271815598011,
-0.06909970939159393,
-0.01409142091870308,
0.09393755346536636,
0.038268081843853,
-0.05468018725514412,
-0.11155394464731216,
0.04250014200806618,
0.004342753440141678,
-0.07549434155225754,
0.0026596274692565203,
0.029161958023905754,
-0.08667975664138794,
0.017411457374691963,
0.08097363263368607,
0.04779443517327309,
0.07804712653160095,
-0.045989517122507095,
0.029055872932076454,
0.008522961288690567,
0.11562397330999374,
-0.018675338476896286,
0.03946942836046219,
0.14146849513053894,
-0.054584503173828125,
0.09373261779546738,
0.1530802845954895,
0.028362002223730087,
0.011736424639821053,
0.033920541405677795,
0.0800916999578476,
0.012823645956814289,
-0.1763082891702652,
-0.030423032119870186,
-0.05831609666347504,
-0.04226875305175781,
0.08677943050861359,
-0.010950571857392788,
-0.03820240497589111,
0.1315203309059143,
-0.022834567353129387,
0.10323142260313034,
0.021827658638358116,
0.1024039089679718,
0.19174902141094208,
0.01765175722539425,
0.05947408825159073,
-0.04622340202331543,
-0.055404696613550186,
0.038637686520814896,
0.0718919187784195,
0.10711053013801575,
-0.13829731941223145,
0.08759504556655884,
0.07586384564638138,
0.11091314256191254,
-0.012388079427182674,
0.04255121946334839,
-0.09295503795146942,
0.07423866540193558,
0.009329325519502163,
-0.08362692594528198,
-0.0635654404759407,
0.07076872140169144,
0.013366607949137688,
-0.026345834136009216,
0.01153392344713211,
0.008032542653381824,
0.07640008628368378,
0.18291527032852173,
0.055755000561475754,
-0.1982468068599701,
0.036283984780311584,
0.03606344759464264,
0.04082240164279938,
-0.07428017258644104,
-0.017498590052127838,
0.021560929715633392,
-0.12943775951862335,
0.08576671034097672,
-0.06417971849441528,
0.08360632508993149,
-0.10665950179100037,
-0.0033577014692127705,
0.09064649790525436,
0.19693417847156525,
0.015941806137561798,
0.12599967420101166,
-0.18666014075279236,
0.028377195820212364,
0.0208726916462183,
0.017271587625145912,
-0.06553325802087784,
0.06573013216257095,
0.016800986602902412,
-0.014683363027870655,
0.055350419133901596,
-0.009732860140502453,
0.002998986514285207,
-0.08540284633636475,
-0.02715817652642727,
0.04409574344754219,
0.09932363778352737,
-0.09208733588457108,
0.09443117678165436,
-0.03163929656147957,
-0.06887291371822357,
-0.03322508931159973,
-0.01634834334254265,
-0.14865095913410187,
-0.1808701455593109,
0.05723744258284569,
-0.011053577065467834,
0.07739075273275375,
0.0011166685726493597,
0.04035545513033867,
-0.04986102133989334,
0.24553604423999786,
-0.0644582137465477,
-0.13378067314624786,
-0.052130572497844696,
0.022655397653579712,
0.1319042295217514,
-0.13320404291152954,
0.018302686512470245,
-0.05135404318571091,
0.06183576211333275,
-0.008065765723586082,
-0.11458548158407211,
0.018291201442480087,
-0.03468229994177818,
-0.0516725555062294,
-0.02042926847934723,
0.060033462941646576,
0.025800909847021103,
-0.021560030058026314,
0.011553005315363407,
0.0042214104905724525,
-0.03048236109316349,
-0.11868638545274734,
-0.09678854048252106,
0.07004941254854202,
0.08534897863864899,
0.05340046435594559,
-0.15407699346542358,
-0.05990958586335182,
-0.09663016349077225,
0.06884267926216125,
0.18209585547447205,
0.13707031309604645,
-0.060226213186979294,
0.07883881777524948,
0.09806211292743683,
-0.06240422651171684,
-0.17983882129192352,
0.01885521225631237,
0.07583291083574295,
-0.017907608300447464,
-0.015118369832634926,
-0.21707703173160553,
0.11372283101081848,
0.01686873659491539,
0.010406754910945892,
0.027698298916220665,
-0.18987374007701874,
-0.13102315366268158,
0.1184224858880043,
0.05541882663965225,
-0.15958304703235626,
-0.06124121695756912,
-0.05136580392718315,
-0.08672059327363968,
-0.11474128812551498,
0.1651037633419037,
-0.0829491913318634,
0.074469193816185,
0.01928539015352726,
0.09869500994682312,
0.0714162290096283,
-0.03345774859189987,
0.04545379430055618,
0.04103191941976547,
0.04155188798904419,
-0.07720111310482025,
0.025874311104416847,
0.11325935274362564,
-0.09343204647302628,
0.15233123302459717,
-0.055539555847644806,
0.06898926943540573,
-0.07339540123939514,
-0.020783230662345886,
-0.0389900766313076,
0.07113631069660187,
-0.007771842647343874,
-0.09470013529062271,
-0.0530855655670166,
0.024201205000281334,
0.0822913870215416,
0.029616137966513634,
0.025538120418787003,
0.0006873973761685193,
-0.07139572501182556,
0.11533524841070175,
0.08654237538576126,
0.10291513800621033,
-0.07567261159420013,
-0.00611143046990037,
-0.0108808483928442,
0.044382497668266296,
-0.09322063624858856,
0.0765669047832489,
0.0750349685549736,
-0.008098985999822617,
0.06407666206359863,
0.011512137949466705,
-0.0924428254365921,
-0.011017393320798874,
0.07024940848350525,
-0.14688968658447266,
0.03151143342256546,
-0.07229548692703247,
-0.022384848445653915,
-0.14532135426998138,
-0.04615459591150284,
0.173157200217247,
0.004062202293425798,
-0.03590371832251549,
0.00827991683036089,
0.009059218689799309,
-0.05875474214553833,
0.05949101969599724,
0.06657755374908447,
0.014104880392551422,
-0.07240716367959976,
0.07248465716838837,
0.09378405660390854,
-0.002211995655670762,
-0.004544174764305353,
0.12801675498485565,
-0.07850481569766998,
-0.09103003889322281,
-0.03478812798857689,
0.09594719856977463,
-0.06544022262096405,
0.018472667783498764,
-0.023093463853001595,
-0.021804524585604668,
-0.005924108438193798,
0.036000579595565796,
0.0028466859366744757,
0.025909028947353363,
-0.018051745370030403,
-0.01082597579807043,
-0.06811580061912537,
0.13090340793132782,
0.02960098721086979,
0.06965777277946472,
-0.0760636180639267,
0.14266687631607056,
-0.014927336014807224,
0.1012517586350441,
-0.0024965249467641115,
0.006858712527900934,
-0.08614152669906616,
-0.02440309338271618,
-0.08346433937549591,
0.0130824726074934,
-0.06455853581428528,
0.022936077788472176,
-0.0018055816181004047,
0.005702444817870855,
-0.020941879600286484,
0.030980978161096573,
-0.06568922102451324,
-0.07429458945989609,
-0.03886132314801216,
0.08345910906791687,
-0.1579330563545227,
-0.03930042311549187,
0.08188942074775696,
-0.08462756872177124,
0.06719992309808731,
-0.010841983370482922,
-0.012476307339966297,
0.058676768094301224,
-0.09647820144891739,
-0.05375141277909279,
-0.0033216604497283697,
0.07527085393667221,
0.01041028555482626,
-0.0563117116689682,
0.030935509130358696,
-0.05011703446507454,
0.038916461169719696,
-0.0523638017475605,
0.046287305653095245,
-0.13207612931728363,
0.07863128930330276,
-0.0001781192549970001,
-0.04328680410981178,
-0.07599996030330658,
0.027452610433101654,
0.015498016960918903,
0.06480418890714645,
0.11140917241573334,
-0.050495244562625885,
0.09160912781953812,
-0.06960341334342957,
-0.008964601904153824,
0.0299502145498991,
-0.00963735394179821,
0.009310316294431686,
-0.0980161651968956,
0.03300316631793976,
-0.0364658497273922,
0.08060220628976822,
0.08672941476106644,
-0.015134193003177643,
0.0014137730468064547,
0.013392111286520958,
0.021813128143548965,
0.049351781606674194,
0.05674794688820839,
0.029970642179250717,
-0.0013393672415986657,
-0.060285355895757675,
0.034026119858026505,
-0.03958210349082947,
-0.11052260547876358,
0.09851621836423874,
0.07189274579286575,
0.05373663827776909,
0.01087936945259571,
0.07385317981243134,
-0.06035993620753288,
0.0018914216198027134,
0.09287693351507187,
-0.018450621515512466,
0.04491996765136719,
-0.06925176829099655,
0.0932452604174614,
0.07984057068824768,
-0.15268097817897797,
0.06767413020133972,
0.09524357318878174,
-0.09479014575481415,
-0.0894458144903183,
-0.18587267398834229,
-0.06986438482999802,
-0.0358923077583313,
0.019054677337408066,
-0.10432042926549911,
0.05277501791715622,
-0.014637484215199947,
-0.04100565239787102,
0.00877365656197071,
0.1125701516866684,
-0.05737539380788803,
-0.0883871465921402,
0.024775784462690353,
0.021319249644875526,
0.030599132180213928,
0.04854773357510567,
0.027156531810760498,
0.039926234632730484,
0.05999715253710747,
0.01985640451312065,
0.021593080833554268,
0.06810901314020157,
0.021129004657268524,
-0.04178009554743767,
-0.059290941804647446,
-0.020428786054253578,
0.020497960969805717,
0.015203761868178844,
0.03234976530075073,
0.05121314525604248,
-0.03787790983915329,
-0.040385134518146515,
0.14302827417850494,
-0.060712166130542755,
-0.12327753752470016,
-0.11314652115106583,
0.09883119910955429,
0.08085139840841293,
0.03057660162448883,
0.00003681928137666546,
-0.12334776669740677,
-0.005590636283159256,
0.1500464826822281,
0.17937825620174408,
-0.063185915350914,
0.03038346767425537,
-0.0006253882311284542,
0.019470339640975,
0.015012121759355068,
0.0610126368701458,
0.045954134315252304,
0.1671806275844574,
-0.0576922707259655,
0.0434454083442688,
0.03502742573618889,
0.0006812477367930114,
-0.044466532766819,
0.03715511038899422,
-0.03738259896636009,
-0.035668957978487015,
0.009305066429078579,
0.041011542081832886,
-0.10119063407182693,
-0.0471515916287899,
-0.010586639866232872,
-0.04900263622403145,
-0.10237191617488861,
-0.01908566989004612,
-0.0010859676403924823,
0.0066514997743070126,
0.07259807735681534,
0.0013350013177841902,
-0.04265435412526131,
0.17353735864162445,
0.006202077493071556,
-0.12149602174758911,
-0.02251758985221386,
0.04698812961578369,
0.03287648409605026,
0.13571295142173767,
0.018585868179798126,
0.11769900470972061,
0.07924872636795044,
-0.012661357410252094,
-0.07717391103506088,
0.010433647781610489,
-0.0033769654110074043,
-0.10336149483919144,
0.07625508308410645,
0.08132386952638626,
-0.02206542156636715,
0.06917139887809753,
0.06941406428813934,
-0.05692029371857643,
0.05569226294755936,
0.014761943370103836,
0.017683133482933044,
-0.108546182513237,
0.004343572538346052,
-0.09995555132627487,
0.10385299474000931,
0.1699114292860031,
-0.008649113588035107,
0.03795056790113449,
-0.05113513767719269,
0.008683726191520691,
0.004139290191233158,
0.06735917925834656,
-0.044271960854530334,
-0.022810298949480057,
0.004985183011740446,
0.038522250950336456,
0.03751320391893387,
-0.17783071100711823,
-0.04190707951784134,
0.015660930424928665,
0.024327868595719337,
-0.05292189121246338,
0.11092779040336609,
0.03644496947526932,
0.024457156658172607,
-0.012462086975574493,
-0.2044772207736969,
-0.018472477793693542,
0.026198863983154297,
-0.1514143943786621,
-0.08529270440340042
] |
null | null |
transformers
|
# kobart-news
- This model is a [KoBART](https://huggingface.co/hyunwoongko/kobart) model fine-tuned on the [문서요약 텍스트/신문기사](https://aihub.or.kr/aidata/8054) (Korean document summarization / newspaper article) dataset using [Ainize Teachable-NLP](https://ainize.ai/teachable-nlp).
## Usage
### Python Code
```python
from transformers import PreTrainedTokenizerFast, BartForConditionalGeneration
# Load the model and tokenizer
tokenizer = PreTrainedTokenizerFast.from_pretrained("ainize/kobart-news")
model = BartForConditionalGeneration.from_pretrained("ainize/kobart-news")
# Encode Input Text
input_text = '국내 전반적인 경기침체로 상가 건물주의 수익도 전국적인 감소세를 보이고 있는 것으로 나타났다. 수익형 부동산 연구개발기업 상가정보연구소는 한국감정원 통계를 분석한 결과 전국 중대형 상가 순영업소득(부동산에서 발생하는 임대수입, 기타수입에서 제반 경비를 공제한 순소득)이 1분기 ㎡당 3만4200원에서 3분기 2만5800원으로 감소했다고 17일 밝혔다. 수도권, 세종시, 지방광역시에서 순영업소득이 가장 많이 감소한 지역은 3분기 1만3100원을 기록한 울산으로, 1분기 1만9100원 대비 31.4% 감소했다. 이어 대구(-27.7%), 서울(-26.9%), 광주(-24.9%), 부산(-23.5%), 세종(-23.4%), 대전(-21%), 경기(-19.2%), 인천(-18.5%) 순으로 감소했다. 지방 도시의 경우도 비슷했다. 경남의 3분기 순영업소득은 1만2800원으로 1분기 1만7400원 대비 26.4% 감소했으며 제주(-25.1%), 경북(-24.1%), 충남(-20.9%), 강원(-20.9%), 전남(-20.1%), 전북(-17%), 충북(-15.3%) 등도 감소세를 보였다. 조현택 상가정보연구소 연구원은 "올해 내수 경기의 침체된 분위기가 유지되며 상가, 오피스 등을 비롯한 수익형 부동산 시장의 분위기도 경직된 모습을 보였고 오피스텔, 지식산업센터 등의 수익형 부동산 공급도 증가해 공실의 위험도 늘었다"며 "실제 올 3분기 전국 중대형 상가 공실률은 11.5%를 기록하며 1분기 11.3% 대비 0.2% 포인트 증가했다"고 말했다. 그는 "최근 소셜커머스(SNS를 통한 전자상거래), 음식 배달 중개 애플리케이션, 중고 물품 거래 애플리케이션 등의 사용 증가로 오프라인 매장에 영향을 미쳤다"며 "향후 지역, 콘텐츠에 따른 상권 양극화 현상은 심화될 것으로 보인다"고 덧붙였다.'
input_ids = tokenizer.encode(input_text, return_tensors="pt")
# Generate Summary Text Ids
summary_text_ids = model.generate(
input_ids=input_ids,
bos_token_id=model.config.bos_token_id,
eos_token_id=model.config.eos_token_id,
length_penalty=2.0,
max_length=142,
min_length=56,
num_beams=4,
)
# Decoding Text
print(tokenizer.decode(summary_text_ids[0], skip_special_tokens=True))
```
### API and Demo
You can try this model through [ainize-api](https://ainize.ai/gkswjdzz/summarize-torchserve?branch=main) and [ainize-demo](https://main-summarize-torchserve-gkswjdzz.endpoint.ainize.ai/).
|
{"language": "ko", "license": "mit", "tags": ["summarization", "bart"]}
|
summarization
|
ainize/kobart-news
|
[
"transformers",
"pytorch",
"bart",
"text2text-generation",
"summarization",
"ko",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"ko"
] |
TAGS
#transformers #pytorch #bart #text2text-generation #summarization #ko #license-mit #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# kobart-news
- This model is a kobart fine-tuned on the 문서요약 텍스트/신문기사 using Ainize Teachable-NLP.
## Usage
### Python Code
### API and Demo
You can experience this model through ainize-api and ainize-demo.
|
[
"# kobart-news\n- This model is a kobart fine-tuned on the 문서요약 텍스트/신문기사 using Ainize Teachable-NLP.",
"## Usage",
"### Python Code",
"### API and Demo\nYou can experience this model through ainize-api and ainize-demo."
] |
[
"TAGS\n#transformers #pytorch #bart #text2text-generation #summarization #ko #license-mit #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# kobart-news\n- This model is a kobart fine-tuned on the 문서요약 텍스트/신문기사 using Ainize Teachable-NLP.",
"## Usage",
"### Python Code",
"### API and Demo\nYou can experience this model through ainize-api and ainize-demo."
] |
[
53,
34,
3,
4,
21
] |
[
"passage: TAGS\n#transformers #pytorch #bart #text2text-generation #summarization #ko #license-mit #autotrain_compatible #endpoints_compatible #has_space #region-us \n# kobart-news\n- This model is a kobart fine-tuned on the 문서요약 텍스트/신문기사 using Ainize Teachable-NLP.## Usage### Python Code### API and Demo\nYou can experience this model through ainize-api and ainize-demo."
] |
[
-0.005270554684102535,
0.05934730917215347,
-0.003136834828183055,
-0.005397368222475052,
0.07332226634025574,
-0.018134312704205513,
0.19457007944583893,
0.07042057812213898,
0.0342613086104393,
-0.02452295646071434,
0.1374322921037674,
0.13636106252670288,
-0.010939407162368298,
0.2449411302804947,
-0.011426634155213833,
-0.34644797444343567,
0.07673052698373795,
0.05057143047451973,
0.0936615988612175,
0.12681227922439575,
0.13112403452396393,
-0.052977342158555984,
0.04068785160779953,
0.05187347158789635,
-0.06841514259576797,
-0.004677171353250742,
-0.078962542116642,
-0.12697246670722961,
0.09646076709032059,
0.0381585955619812,
0.08779888600111008,
0.056469596922397614,
0.008218411356210709,
-0.14042572677135468,
0.04569917172193527,
-0.044992655515670776,
-0.05499769374728203,
0.04719199985265732,
-0.031286682933568954,
-0.025134386494755745,
0.24423757195472717,
0.02376394532620907,
0.0007282719016075134,
-0.0506288968026638,
-0.14449475705623627,
-0.02471783757209778,
-0.026448871940374374,
0.06035751849412918,
0.14961974322795868,
0.09415164589881897,
-0.022657696157693863,
0.1486843377351761,
-0.09856712818145752,
0.06774482876062393,
0.05387840420007706,
-0.2701050043106079,
-0.04952969402074814,
0.1023733988404274,
0.08144209533929825,
-0.029007120057940483,
-0.012343262322247028,
0.05410636216402054,
0.06840414553880692,
0.038932643830776215,
-0.017378058284521103,
-0.05277235805988312,
-0.053815264254808426,
-0.002238150220364332,
-0.1195116937160492,
-0.07158362865447998,
0.22863495349884033,
-0.0417180135846138,
0.015620416961610317,
-0.07425437867641449,
-0.029741160571575165,
-0.015764087438583374,
-0.04550081118941307,
-0.009663531556725502,
-0.0758579820394516,
0.033141739666461945,
-0.055735405534505844,
-0.03463936969637871,
-0.11711451411247253,
0.041085243225097656,
-0.13954898715019226,
0.24591468274593353,
0.01822894997894764,
0.02389984019100666,
-0.17821747064590454,
0.12546637654304504,
-0.00719032296910882,
-0.10424458980560303,
0.012834201566874981,
-0.06870850175619125,
0.11383369565010071,
0.0043122367933392525,
-0.03867761418223381,
-0.012901953421533108,
0.03683093935251236,
0.16184325516223907,
0.04204527661204338,
-0.021738287061452866,
0.06908407062292099,
0.08171968162059784,
0.08989009261131287,
0.08984556794166565,
-0.010122082196176052,
-0.07871617376804352,
-0.0033241056371480227,
0.029553089290857315,
0.01758771389722824,
-0.053806476294994354,
-0.1258818507194519,
-0.002799895592033863,
-0.004921895917505026,
0.004798004403710365,
0.07339555770158768,
0.11693448573350906,
-0.003773641772568226,
-0.017998332157731056,
0.07275224477052689,
-0.03485265374183655,
-0.010707416571676731,
-0.05595404654741287,
-0.00240658619441092,
0.06383553147315979,
0.03740313649177551,
0.03967444226145744,
-0.03502412140369415,
0.09687570482492447,
-0.04334351792931557,
-0.0048950836062431335,
-0.05177851766347885,
-0.022254234179854393,
-0.005796921439468861,
-0.01598113216459751,
0.03530040755867958,
-0.19889678061008453,
-0.12103672325611115,
-0.01689196564257145,
0.06989534199237823,
0.001844158861786127,
-0.09213634580373764,
-0.09020092338323593,
-0.004101085476577282,
0.052111342549324036,
-0.07518661767244339,
-0.0681946650147438,
-0.07851066440343857,
0.054336465895175934,
-0.043381500989198685,
0.0699644610285759,
-0.15765520930290222,
0.03664803132414818,
-0.11462496221065521,
0.023427987471222878,
0.014547335915267467,
0.03343154489994049,
-0.01181040145456791,
0.04142747074365616,
-0.039097025990486145,
-0.010393043980002403,
-0.09920844435691833,
0.08776693046092987,
0.02877507172524929,
0.16125769913196564,
-0.13682661950588226,
-0.07195144146680832,
0.10208763182163239,
-0.06359463930130005,
-0.1066930741071701,
0.0820695087313652,
-0.0446317084133625,
0.16847003996372223,
0.08363279700279236,
0.12148307263851166,
0.08837810158729553,
-0.08773709088563919,
-0.017164988443255424,
0.08397811651229858,
-0.015175344422459602,
-0.05352914705872536,
0.042484935373067856,
0.07142841070890427,
-0.12801721692085266,
0.056885603815317154,
0.01352690253406763,
0.07855747640132904,
-0.04322038218379021,
-0.059203069657087326,
0.026765674352645874,
-0.000431214168202132,
0.12158171087503433,
-0.016493752598762512,
0.09727171808481216,
-0.008643842302262783,
-0.06580335646867752,
0.02606736309826374,
0.10141012072563171,
-0.0021430007182061672,
0.034573838114738464,
-0.13771279156208038,
0.11818403750658035,
-0.03722779080271721,
0.03752564638853073,
-0.12298990041017532,
0.04737449064850807,
-0.058510299772024155,
0.04054409638047218,
0.04732749983668327,
0.15662814676761627,
0.004877062980085611,
-0.055440038442611694,
0.0020941023249179125,
0.022920366376638412,
0.06303227692842484,
0.008389613591134548,
-0.09233064949512482,
-0.021431639790534973,
0.054998476058244705,
-0.08648734539747238,
0.029126698151230812,
-0.08462830632925034,
0.02858845889568329,
-0.007727567106485367,
0.07440861314535141,
-0.04078107699751854,
0.07815813273191452,
-0.00030553762917406857,
0.0593484565615654,
-0.0790342167019844,
0.0413946770131588,
0.07966531068086624,
0.016041859984397888,
-0.06140275299549103,
0.15788544714450836,
-0.14905531704425812,
0.06225541606545448,
0.15297451615333557,
-0.06602291017770767,
0.016104498878121376,
-0.050204504281282425,
-0.03707803785800934,
0.016628844663500786,
-0.04201013594865799,
0.025125641375780106,
0.18039333820343018,
-0.0030522372107952833,
0.1129935160279274,
-0.005774176679551601,
-0.002685945015400648,
-0.010232081636786461,
-0.07579737156629562,
-0.060322634875774384,
0.06590525805950165,
0.047572821378707886,
-0.13148586452007294,
0.10496445745229721,
0.25777238607406616,
-0.02295142598450184,
0.17783313989639282,
0.026224639266729355,
0.021395759657025337,
-0.012038885615766048,
-0.0626373440027237,
-0.03708644583821297,
0.06718103587627411,
-0.14457929134368896,
-0.00874300766736269,
0.06225038692355156,
-0.010110370814800262,
0.07098150998353958,
-0.07663276791572571,
-0.06596502661705017,
0.01041886955499649,
0.03856354206800461,
-0.034488290548324585,
0.1128777265548706,
-0.008211701177060604,
0.07078501582145691,
0.005225769709795713,
-0.10788038372993469,
0.037266943603754044,
0.02175896055996418,
-0.0406607948243618,
0.17759312689304352,
-0.03979657217860222,
-0.25940361618995667,
-0.10631663352251053,
-0.09609223157167435,
0.0386645682156086,
-0.01615329273045063,
0.08973152935504913,
-0.013786801137030125,
-0.0032417525071650743,
-0.06627101451158524,
-0.007637272123247385,
-0.0033846895676106215,
-0.017213888466358185,
0.02371886372566223,
-0.016211755573749542,
-0.06166995316743851,
-0.11075782030820847,
-0.04772544652223587,
-0.028844118118286133,
-0.009282602928578854,
0.10805832594633102,
-0.11406790465116501,
0.08226941525936127,
0.16081956028938293,
0.0027579565066844225,
0.04963282123208046,
-0.01270546205341816,
0.2193075269460678,
-0.10643857717514038,
-0.012726950459182262,
0.13411447405815125,
-0.025098081678152084,
0.05877815559506416,
0.174100860953331,
0.01912359893321991,
-0.030739201232790947,
0.05480722710490227,
-0.04113655537366867,
-0.11629460752010345,
-0.09067731350660324,
-0.14154553413391113,
-0.05765966325998306,
0.09088936448097229,
0.07562501728534698,
0.05456520989537239,
0.030363984405994415,
0.10696975141763687,
-0.018060721457004547,
0.04892300069332123,
-0.02449844405055046,
0.08486658334732056,
0.16521325707435608,
-0.021644311025738716,
0.11547080427408218,
-0.04749941825866699,
-0.1748141497373581,
0.0551731139421463,
0.02809310518205166,
0.05455954372882843,
0.06515020877122879,
-0.08795421570539474,
0.07281166315078735,
0.031912580132484436,
0.13692982494831085,
0.08505725115537643,
0.0010023998329415917,
-0.035902176052331924,
-0.051261056214571,
-0.04635762423276901,
-0.020395740866661072,
0.07462474703788757,
0.024632809683680534,
-0.1204792931675911,
-0.015196543186903,
-0.09130871295928955,
0.055534638464450836,
0.0832679346203804,
0.1426030546426773,
-0.24260374903678894,
-0.00879132654517889,
0.04747844859957695,
-0.050301648676395416,
-0.06296124309301376,
0.057190004736185074,
-0.008686198852956295,
-0.14727625250816345,
0.05402226746082306,
-0.020839255303144455,
0.11455312371253967,
-0.06975103169679642,
0.059787262231111526,
-0.06381166726350784,
-0.12875878810882568,
-0.016410432755947113,
0.09192725270986557,
-0.2472398579120636,
0.24620521068572998,
-0.032498158514499664,
-0.0017201006412506104,
-0.05693412199616432,
-0.03817793354392052,
0.05934169888496399,
0.13192997872829437,
0.07036832720041275,
0.018929647281765938,
-0.06353200972080231,
-0.12828890979290009,
-0.12113787978887558,
0.06445218622684479,
0.0019091261783614755,
-0.0825604498386383,
0.051302436739206314,
-0.015223615802824497,
0.030387893319129944,
-0.021359331905841827,
0.06518622487783432,
-0.05993105471134186,
-0.16620616614818573,
0.08365090191364288,
0.09001749753952026,
0.05508887395262718,
-0.005995493847876787,
-0.10044334828853607,
-0.025784531608223915,
0.024401642382144928,
0.04037487134337425,
-0.05192781239748001,
-0.07742827385663986,
-0.053331807255744934,
0.015431823208928108,
-0.08731003105640411,
-0.0037551026325672865,
-0.020362872630357742,
0.018255682662129402,
-0.08753278106451035,
-0.12584735453128815,
0.04528658837080002,
-0.07974245399236679,
-0.042770445346832275,
-0.022986145690083504,
0.01760229282081127,
0.04095642268657684,
0.009403756819665432,
0.06252466887235641,
0.029240746051073074,
-0.08441755920648575,
-0.10024888068437576,
-0.09077797830104828,
0.05391307547688484,
-0.013356774114072323,
-0.050234995782375336,
0.0018239448545500636,
-0.04240339994430542,
-0.058313269168138504,
-0.01161062903702259,
0.15321354568004608,
0.21614782512187958,
-0.06216149032115936,
0.06290785223245621,
0.13729289174079895,
-0.04058319702744484,
-0.2970763146877289,
-0.18820662796497345,
-0.0745391696691513,
0.0183628648519516,
0.019494064152240753,
-0.08963388204574585,
0.06471236795186996,
-0.006524127442389727,
-0.07932380586862564,
-0.01513760257512331,
-0.24723593890666962,
-0.11383029073476791,
0.22839917242527008,
-0.05504078045487404,
0.3371279835700989,
-0.07145332545042038,
-0.01835155114531517,
-0.01823587343096733,
-0.08114612847566605,
0.172456756234169,
-0.04508132115006447,
0.09103090316057205,
-0.035969704389572144,
0.12263113260269165,
0.02290329337120056,
-0.008665268309414387,
0.12872307002544403,
-0.021056173369288445,
-0.014699448831379414,
-0.12322627007961273,
-0.17039309442043304,
-0.04355229437351227,
-0.06154754385352135,
0.07508022338151932,
-0.12126799672842026,
0.023297669366002083,
-0.23173408210277557,
-0.028043154627084732,
-0.052763160318136215,
0.07567630708217621,
-0.010223420336842537,
-0.08006103336811066,
-0.06509324908256531,
0.03498731553554535,
0.013372046872973442,
0.013863985426723957,
0.2588188946247101,
-0.016337163746356964,
0.081870436668396,
0.081227608025074,
0.0743437334895134,
-0.04142162576317787,
0.039797332137823105,
-0.03704944998025894,
-0.044579893350601196,
0.07923940569162369,
-0.1417940855026245,
-0.042881373316049576,
0.09015781432390213,
-0.012054149992763996,
0.056983910501003265,
0.056725259870290756,
-0.05821818858385086,
0.06983432173728943,
0.14575618505477905,
-0.14139069616794586,
-0.045955073088407516,
-0.048351772129535675,
0.1617988795042038,
0.029838833957910538,
0.030794300138950348,
0.14636671543121338,
-0.08527936041355133,
-0.021691694855690002,
0.015844648703932762,
-0.016178356483578682,
-0.0527002289891243,
0.040312375873327255,
0.03615705668926239,
0.035292576998472214,
-0.046183425933122635,
0.004628829192370176,
0.06188354268670082,
-0.06042581796646118,
0.009146282449364662,
0.037376776337623596,
-0.09495080262422562,
-0.12706996500492096,
-0.045873336493968964,
0.08169906586408615,
-0.10321525484323502,
-0.05317901447415352,
-0.002695895964279771,
-0.05673856660723686,
0.0420403778553009,
0.08502873033285141,
0.09766119718551636,
0.02692674659192562,
-0.06823885440826416,
-0.0168908778578043,
-0.007072266656905413,
0.02853229269385338,
0.0641525611281395,
0.023392366245388985,
-0.05251580849289894,
0.016087831929326057,
-0.023110859096050262,
0.14271239936351776,
-0.10078561305999756,
-0.09707251936197281,
-0.1068759337067604,
0.028787588700652122,
-0.10019858181476593,
-0.019644008949398994,
-0.19061101973056793,
-0.03509527072310448,
-0.02244396135210991,
-0.03945883736014366,
-0.057552509009838104,
-0.05621443688869476,
-0.08742177486419678,
0.03772338479757309,
-0.04530709981918335,
0.061749838292598724,
-0.03728587180376053,
0.019192054867744446,
0.07858884334564209,
-0.03635435923933983,
0.06705270707607269,
0.022301364690065384,
-0.037655752152204514,
0.038751717656850815,
-0.15856511890888214,
-0.017561050131917,
0.04468941316008568,
0.015623880550265312,
0.05554179102182388,
0.011460872367024422,
0.004104370716959238,
0.02968294732272625,
0.09702853113412857,
0.043432414531707764,
0.05588318407535553,
-0.09498150646686554,
0.01586916670203209,
0.026206957176327705,
-0.11244865506887436,
-0.03051714599132538,
0.009942195378243923,
0.014586882665753365,
0.06417358666658401,
0.18356694281101227,
-0.0709330141544342,
0.05909552425146103,
-0.017582662403583527,
0.028386538848280907,
-0.009336655028164387,
-0.1485406756401062,
0.021890774369239807,
-0.13917061686515808,
-0.02281961590051651,
-0.013381941244006157,
0.17472147941589355,
0.08945304155349731,
-0.11080240458250046,
0.010034526698291302,
-0.03103523701429367,
0.037776414304971695,
-0.02820770815014839,
0.14527124166488647,
0.07119457423686981,
0.016750965267419815,
-0.15797550976276398,
0.041918739676475525,
0.006499445531517267,
0.03579384461045265,
0.05318396911025047,
0.07849591225385666,
0.025145085528492928,
0.13012069463729858,
0.007554512936621904,
0.036239173263311386,
-0.09879151731729507,
-0.2455311268568039,
-0.07866864651441574,
0.08146060258150101,
-0.03848329558968544,
0.05932262912392616,
0.14515641331672668,
-0.0707310363650322,
0.009142334572970867,
-0.010076782666146755,
-0.019701072946190834,
-0.1471787542104721,
-0.21637733280658722,
-0.058396514505147934,
-0.10680516809225082,
-0.0009400860872119665,
-0.06411907821893692,
-0.003994882106781006,
0.1470162719488144,
0.03927795588970184,
-0.04442388191819191,
0.07635223120450974,
0.013057136908173561,
-0.04988696798682213,
0.12683890759944916,
-0.03618687763810158,
0.0334119014441967,
-0.12004777789115906,
0.03622669726610184,
-0.10769844055175781,
0.0022110261488705873,
-0.007168903015553951,
0.03206094726920128,
-0.024983884766697884,
0.011465474963188171,
-0.0836431235074997,
-0.07008045166730881,
-0.04213598743081093,
0.009928040206432343,
0.0874178484082222,
0.05628816410899162,
0.035425443202257156,
-0.0029170012567192316,
0.01976115256547928,
0.2136843353509903,
-0.0018723777029663324,
-0.13785623013973236,
-0.08381733298301697,
0.17609699070453644,
-0.0004245552991051227,
0.08019815385341644,
-0.05897035077214241,
0.010669061914086342,
-0.0756668820977211,
0.24563172459602356,
0.2504776418209076,
-0.04517682269215584,
0.041088808327913284,
-0.009622677229344845,
0.04365384951233864,
0.030014820396900177,
0.09949063509702682,
0.036252737045288086,
0.2652483582496643,
-0.06746117025613785,
-0.02674352191388607,
-0.05217008292675018,
-0.027421385049819946,
-0.06121326610445976,
0.0073614586144685745,
0.02738477848470211,
-0.07587815076112747,
-0.07253099232912064,
0.11689354479312897,
-0.18487302958965302,
0.12383375316858292,
-0.10849091410636902,
-0.12531593441963196,
-0.11147657036781311,
0.020628521218895912,
0.10800278186798096,
0.011770362965762615,
0.12413559854030609,
-0.049032218754291534,
0.010544946417212486,
-0.03411741182208061,
-0.01587795279920101,
-0.051405973732471466,
0.005485906265676022,
0.09416311234235764,
-0.004773215856403112,
0.11497694253921509,
-0.012571949511766434,
0.03321746364235878,
0.09941703081130981,
0.03025437705218792,
-0.032476648688316345,
0.10509529709815979,
0.03849566727876663,
0.05535127595067024,
0.02895236201584339,
-0.04769142344594002,
0.004398608114570379,
-0.031346745789051056,
0.12254316359758377,
-0.11438477039337158,
0.09966438263654709,
-0.03036082722246647,
0.011848309077322483,
-0.09178517758846283,
0.061670124530792236,
-0.0408027283847332,
0.1297147572040558,
0.09893710166215897,
-0.06536029279232025,
-0.043724410235881805,
0.017112236469984055,
0.0387524776160717,
-0.042456720024347305,
-0.07544679939746857,
-0.07628998160362244,
-0.07070445269346237,
-0.0838286280632019,
-0.03293953463435173,
0.010272611863911152,
-0.18875454366207123,
0.0014562654541805387,
-0.09628963470458984,
-0.004281265661120415,
-0.013405708596110344,
0.08772014826536179,
0.10514821112155914,
-0.025967758148908615,
-0.016063014045357704,
-0.07303258031606674,
-0.009697379544377327,
0.03381563723087311,
-0.11537326127290726,
-0.10284774750471115
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-all-languages
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.2454
- Rouge1: 8.3826
- Rouge2: 3.5524
- Rougel: 6.8656
- Rougelsum: 7.8362
## Model description
More information needed
## Intended uses & limitations
More information needed
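
As a starting point, the checkpoint can be loaded for summarization with the standard transformers seq2seq API. The snippet below is a minimal sketch; the input text and the generation settings (`num_beams`, `max_length`) are illustrative assumptions, not values taken from this card.

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the fine-tuned checkpoint from the Hub.
tokenizer = AutoTokenizer.from_pretrained("airKlizz/mt5-base-wikinewssum-all-languages")
model = AutoModelForSeq2SeqLM.from_pretrained("airKlizz/mt5-base-wikinewssum-all-languages")

# Illustrative input; replace with the news article to summarize.
article = "Your news article text goes here."
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=512)

# Generation settings are illustrative assumptions, not values from this card.
summary_ids = model.generate(**inputs, num_beams=4, max_length=128, early_stopping=True)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```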
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
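
For reference, these settings correspond roughly to the `Seq2SeqTrainingArguments` configuration sketched below (argument names follow the standard transformers API; the `output_dir` value is a placeholder assumption, not part of this card).

```python
from transformers import Seq2SeqTrainingArguments

# Sketch of the training configuration listed above; output_dir is a placeholder.
training_args = Seq2SeqTrainingArguments(
    output_dir="mt5-base-wikinewssum-all-languages",
    learning_rate=5.6e-5,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=2,  # effective train batch size of 8
    num_train_epochs=8,
    lr_scheduler_type="linear",
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```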
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:-----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 3467 | 2.4034 | 8.0363 | 3.2484 | 6.5409 | 7.477 |
| No log | 2.0 | 6934 | 2.3276 | 8.1054 | 3.2905 | 6.5765 | 7.5687 |
| No log | 3.0 | 10401 | 2.2976 | 8.169 | 3.4272 | 6.6597 | 7.6435 |
| No log | 4.0 | 13868 | 2.2795 | 8.2941 | 3.5353 | 6.7881 | 7.7664 |
| 2.8057 | 5.0 | 17335 | 2.2621 | 8.3302 | 3.5599 | 6.8238 | 7.7928 |
| 2.8057 | 6.0 | 20802 | 2.2547 | 8.3818 | 3.5886 | 6.8672 | 7.844 |
| 2.8057 | 7.0 | 24269 | 2.2472 | 8.3809 | 3.5696 | 6.8575 | 7.8327 |
| 2.8057 | 8.0 | 27736 | 2.2454 | 8.3826 | 3.5524 | 6.8656 | 7.8362 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-all-languages", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-all-languages
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-all-languages
==================================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.2454
* Rouge1: 8.3826
* Rouge2: 3.5524
* Rougel: 6.8656
* Rougelsum: 7.8362
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-english-100
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 6.6225
- Rouge1: 3.909
- Rouge2: 0.9312
- Rougel: 3.3835
- Rougelsum: 3.7786
## Model description
More information needed
## Intended uses & limitations
More information needed
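The original card does not include a usage snippet, but the checkpoint is tagged for summarization with Transformers, so it should load through the standard `summarization` pipeline. A minimal, untested sketch (the generation length is an illustrative assumption, and depending on how the training script formatted inputs a task prefix may or may not be needed):

```python
from transformers import pipeline

# Load the fine-tuned checkpoint through the summarization pipeline.
summarizer = pipeline(
    "summarization",
    model="airKlizz/mt5-base-wikinewssum-english-100",
)

article = "..."  # a news article to summarize
summary = summarizer(article, max_length=128)
print(summary[0]["summary_text"])
```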
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 0.96 | 12 | 14.4949 | 2.7398 | 0.7181 | 2.491 | 2.6561 |
| No log | 1.96 | 24 | 10.5056 | 4.4428 | 1.4293 | 3.8469 | 4.2869 |
| No log | 2.96 | 36 | 8.9856 | 4.1179 | 1.229 | 3.5726 | 3.9693 |
| No log | 3.96 | 48 | 7.7950 | 3.9217 | 1.1339 | 3.4256 | 3.7905 |
| No log | 4.96 | 60 | 7.0734 | 3.8004 | 1.0326 | 3.3246 | 3.6766 |
| No log | 5.96 | 72 | 6.7897 | 3.6351 | 0.9162 | 3.1839 | 3.5149 |
| No log | 6.96 | 84 | 6.6610 | 3.7486 | 0.8829 | 3.2583 | 3.6193 |
| No log | 7.96 | 96 | 6.6225 | 3.909 | 0.9312 | 3.3835 | 3.7786 |
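The evaluation code is not included in this card, but ROUGE scores in this format are typically produced with the ROUGE metric from the `datasets` library (version 1.16.1 is listed below), reporting the mid F-measure scaled by 100. A minimal sketch, assuming that convention; the prediction and reference strings are placeholders:

```python
from datasets import load_metric

# ROUGE as reported by the Trainer-based summarization scripts:
# mid F-measure of each ROUGE variant, multiplied by 100.
rouge = load_metric("rouge")

predictions = ["model generated summary"]             # decoded model outputs (placeholder)
references = ["reference summary from the dataset"]   # gold summaries (placeholder)

result = rouge.compute(predictions=predictions, references=references, use_stemmer=True)
scores = {key: round(value.mid.fmeasure * 100, 4) for key, value in result.items()}
print(scores)  # keys: rouge1, rouge2, rougeL, rougeLsum
```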
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-english-100", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-english-100
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-english-100
================================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 6.6225
* Rouge1: 3.909
* Rouge2: 0.9312
* Rougel: 3.3835
* Rougelsum: 3.7786
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-english-1000
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.4724
- Rouge1: 7.7389
- Rouge2: 3.1606
- Rougel: 6.3317
- Rougelsum: 7.2487
## Model description
More information needed
## Intended uses & limitations
More information needed
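No usage example is given in the original card. If the checkpoint is loaded directly rather than through a pipeline, the standard seq2seq pattern below should apply; this is a sketch only, and the input length and beam-search settings are illustrative assumptions rather than values from the card:

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

model_name = "airKlizz/mt5-base-wikinewssum-english-1000"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

article = "..."  # a news article to summarize

# Tokenize the article, generate a summary, and decode it.
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=512)
summary_ids = model.generate(**inputs, max_length=128, num_beams=4)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```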
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
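Since the card was generated from the Trainer, these settings most likely map onto `Seq2SeqTrainingArguments` roughly as in the sketch below; the `output_dir` name and anything not listed above are assumptions, not documented values:

```python
from transformers import Seq2SeqTrainingArguments

# Hypothetical reconstruction of the hyperparameters listed above.
training_args = Seq2SeqTrainingArguments(
    output_dir="mt5-base-wikinewssum-english-1000",  # assumed
    learning_rate=5.6e-5,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=2,  # effective train batch size: 4 * 2 = 8
    num_train_epochs=8,
    seed=42,
    lr_scheduler_type="linear",
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the optimizer default.
)
```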
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 125 | 2.6981 | 7.1504 | 2.6253 | 5.8261 | 6.7427 |
| No log | 2.0 | 250 | 2.5597 | 7.4666 | 2.9362 | 6.0965 | 6.9699 |
| No log | 3.0 | 375 | 2.5145 | 7.4599 | 2.9449 | 6.0941 | 6.9734 |
| No log | 4.0 | 500 | 2.4904 | 7.5063 | 2.975 | 6.137 | 7.0027 |
| No log | 5.0 | 625 | 2.4904 | 7.6027 | 3.0582 | 6.2161 | 7.0832 |
| No log | 6.0 | 750 | 2.4801 | 7.7601 | 3.1916 | 6.3689 | 7.2686 |
| No log | 7.0 | 875 | 2.4737 | 7.7162 | 3.1332 | 6.3113 | 7.2283 |
| No log | 8.0 | 1000 | 2.4724 | 7.7389 | 3.1606 | 6.3317 | 7.2487 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-english-1000", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-english-1000
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-english-1000
=================================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.4724
* Rouge1: 7.7389
* Rouge2: 3.1606
* Rougel: 6.3317
* Rougelsum: 7.2487
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-english
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.3040
- Rouge1: 8.9565
- Rouge2: 3.6563
- Rougel: 7.1346
- Rougelsum: 8.3802
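The card itself does not include an inference example; as a minimal sketch (assuming the standard `transformers` summarization pipeline and the hub id `airKlizz/mt5-base-wikinewssum-english` — this snippet is illustrative, not part of the original card):

```python
from transformers import pipeline

# Minimal illustrative sketch: load the fine-tuned checkpoint
# into a summarization pipeline and summarize one article.
summarizer = pipeline("summarization", model="airKlizz/mt5-base-wikinewssum-english")

article = "Your news article text goes here."  # placeholder input
summary = summarizer(article, max_length=128, min_length=16, do_sample=False)
print(summary[0]["summary_text"])
```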
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (an illustrative configuration sketch follows the list):
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
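A hedged sketch of how these hyperparameters would typically be expressed with `Seq2SeqTrainingArguments`; the exact training script is not part of the card, so the output directory and argument mapping below are assumptions, not the author's code:

```python
from transformers import Seq2SeqTrainingArguments

# Illustrative only: mirrors the hyperparameters listed above.
# Effective batch size is 4 (per device) * 2 (accumulation steps) = 8.
training_args = Seq2SeqTrainingArguments(
    output_dir="mt5-base-wikinewssum-english",  # assumed output path
    learning_rate=5.6e-5,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    seed=42,
    gradient_accumulation_steps=2,
    num_train_epochs=8,
    lr_scheduler_type="linear",
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
)
```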
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 1010 | 2.4360 | 8.7287 | 3.5817 | 7.0093 | 8.1879 |
| No log | 2.0 | 2020 | 2.3922 | 8.7227 | 3.5385 | 6.96 | 8.1887 |
| No log | 3.0 | 3030 | 2.3422 | 8.8565 | 3.5772 | 7.0203 | 8.2957 |
| No log | 4.0 | 4040 | 2.3288 | 8.89 | 3.645 | 7.0602 | 8.3314 |
| 3.1253 | 5.0 | 5050 | 2.3209 | 8.868 | 3.6109 | 7.0537 | 8.299 |
| 3.1253 | 6.0 | 6060 | 2.3127 | 8.9488 | 3.6615 | 7.1044 | 8.3785 |
| 3.1253 | 7.0 | 7070 | 2.3056 | 8.9366 | 3.6507 | 7.1338 | 8.3615 |
| 3.1253 | 8.0 | 8080 | 2.3040 | 8.9565 | 3.6563 | 7.1346 | 8.3802 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-english", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-english
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-english
============================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.3040
* Rouge1: 8.9565
* Rouge2: 3.6563
* Rougel: 7.1346
* Rougelsum: 8.3802
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-french
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.0917
- Rouge1: 12.0984
- Rouge2: 5.7289
- Rougel: 9.9245
- Rougelsum: 11.0697
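No inference example is provided in the card; a minimal sketch under the assumption that the checkpoint is loaded with the standard `AutoModelForSeq2SeqLM` API (illustrative, not the author's code):

```python
from transformers import AutoModelForSeq2SeqLM, AutoTokenizer

# Illustrative sketch: load the fine-tuned checkpoint and summarize one document.
model_name = "airKlizz/mt5-base-wikinewssum-french"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

document = "Texte de l'article à résumer."  # placeholder input
inputs = tokenizer(document, return_tensors="pt", truncation=True, max_length=512)
summary_ids = model.generate(**inputs, max_length=128, num_beams=4)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```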
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:-------:|:------:|:------:|:---------:|
| No log | 1.0 | 549 | 2.3203 | 11.5172 | 4.9352 | 9.3617 | 10.4605 |
| No log | 2.0 | 1098 | 2.2057 | 11.8469 | 5.2369 | 9.6452 | 10.8337 |
| No log | 3.0 | 1647 | 2.1525 | 11.9096 | 5.4027 | 9.7648 | 10.9315 |
| 3.1825 | 4.0 | 2196 | 2.1307 | 12.0782 | 5.5848 | 9.9614 | 11.1081 |
| 3.1825 | 5.0 | 2745 | 2.1172 | 11.9821 | 5.6042 | 9.8216 | 11.0077 |
| 3.1825 | 6.0 | 3294 | 2.1012 | 12.0845 | 5.6834 | 9.9119 | 11.0741 |
| 3.1825 | 7.0 | 3843 | 2.0964 | 12.1296 | 5.7271 | 9.9495 | 11.1227 |
| 2.3376 | 8.0 | 4392 | 2.0917 | 12.0984 | 5.7289 | 9.9245 | 11.0697 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-french", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-french
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-french
===========================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.0917
* Rouge1: 12.0984
* Rouge2: 5.7289
* Rougel: 9.9245
* Rougelsum: 11.0697
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-german
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.5135
- Rouge1: 8.0553
- Rouge2: 2.7846
- Rougel: 6.2182
- Rougelsum: 7.6203
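The ROUGE values above are aggregate F-measures reported as percentages; a hedged sketch of how such scores are commonly computed with `datasets.load_metric` (the exact evaluation script is not part of the card, and the predictions and references below are placeholders):

```python
from datasets import load_metric

# Illustrative sketch: score generated summaries against reference summaries.
rouge = load_metric("rouge")

predictions = ["Beispielzusammenfassung des Modells."]       # placeholder model outputs
references = ["Referenzzusammenfassung aus dem Datensatz."]  # placeholder gold summaries

scores = rouge.compute(predictions=predictions, references=references)
# Each entry is an AggregateScore; report the mid F-measure, as in the table above.
print({key: round(value.mid.fmeasure * 100, 4) for key, value in scores.items()})
```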
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 723 | 2.7112 | 7.3681 | 2.3679 | 5.5705 | 6.7588 |
| No log | 2.0 | 1446 | 2.6178 | 7.8539 | 2.7551 | 6.2081 | 7.4139 |
| No log | 3.0 | 2169 | 2.5756 | 7.8401 | 2.6075 | 6.0135 | 7.4303 |
| No log | 4.0 | 2892 | 2.5465 | 8.1097 | 2.8525 | 6.268 | 7.6482 |
| 3.4589 | 5.0 | 3615 | 2.5315 | 8.0192 | 2.7848 | 6.2484 | 7.5859 |
| 3.4589 | 6.0 | 4338 | 2.5222 | 8.1063 | 2.8986 | 6.337 | 7.6564 |
| 3.4589 | 7.0 | 5061 | 2.5136 | 8.0565 | 2.8707 | 6.2732 | 7.6105 |
| 3.4589 | 8.0 | 5784 | 2.5135 | 8.0553 | 2.7846 | 6.2182 | 7.6203 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-german", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-german
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-german
===========================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.5135
* Rouge1: 8.0553
* Rouge2: 2.7846
* Rougel: 6.2182
* Rougelsum: 7.6203
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-italian
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 10.5739
- Rouge1: 2.1728
- Rouge2: 0.1516
- Rougel: 2.0846
- Rougelsum: 2.0515
## Model description
More information needed
## Intended uses & limitations
More information needed
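Although detailed usage guidance has not been written yet, a minimal sketch of loading this checkpoint for summarization with the `transformers` pipeline is shown below. The generation settings (`max_length`, truncation) are illustrative assumptions rather than values tied to this model's training.

```python
from transformers import pipeline

# Minimal sketch: load the fine-tuned checkpoint for Italian news summarization.
summarizer = pipeline("summarization", model="airKlizz/mt5-base-wikinewssum-italian")

article = "..."  # an Italian news article (placeholder)
summary = summarizer(article, max_length=128, truncation=True)
print(summary[0]["summary_text"])
```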
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
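As a point of reference, these hyperparameters could be expressed with `Seq2SeqTrainingArguments` roughly as in the sketch below; the output directory is a placeholder, and the Adam betas/epsilon listed above are the optimizer defaults, so they are not set explicitly.

```python
from transformers import Seq2SeqTrainingArguments

# Sketch only: mirrors the hyperparameters listed above; output_dir is a placeholder.
training_args = Seq2SeqTrainingArguments(
    output_dir="mt5-base-wikinewssum-italian",
    learning_rate=5.6e-5,
    per_device_train_batch_size=4,
    per_device_eval_batch_size=4,
    gradient_accumulation_steps=2,  # effective train batch size of 8
    num_train_epochs=8,
    seed=42,
    lr_scheduler_type="linear",
)
```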
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 8 | 16.6193 | 2.4011 | 0.3829 | 2.1505 | 2.2161 |
| No log | 2.0 | 16 | 15.8909 | 2.5165 | 0.2799 | 2.3403 | 2.3523 |
| No log | 3.0 | 24 | 15.4843 | 2.2794 | 0.2252 | 2.1849 | 2.1382 |
| 17.2559 | 4.0 | 32 | 13.0850 | 2.2448 | 0.1516 | 2.1426 | 2.0859 |
| 17.2559 | 5.0 | 40 | 11.7838 | 2.2448 | 0.1516 | 2.1426 | 2.0859 |
| 17.2559 | 6.0 | 48 | 11.3207 | 2.2424 | 0.1516 | 2.1423 | 2.1171 |
| 17.2559 | 7.0 | 56 | 10.7871 | 2.1081 | 0.1516 | 2.0227 | 1.9838 |
| 14.6026 | 8.0 | 64 | 10.5739 | 2.1728 | 0.1516 | 2.0846 | 2.0515 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-italian", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-italian
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-italian
============================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 10.5739
* Rouge1: 2.1728
* Rouge2: 0.1516
* Rougel: 2.0846
* Rougelsum: 2.0515
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-polish
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.3179
- Rouge1: 7.911
- Rouge2: 3.2189
- Rougel: 6.7856
- Rougelsum: 7.4485
## Model description
More information needed
## Intended uses & limitations
More information needed
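In the absence of documented usage, a minimal sketch with an explicit tokenizer/model pair is given below; the input text and generation arguments (beam count, length limits) are illustrative assumptions.

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Sketch only: summarize a Polish news article with the fine-tuned checkpoint.
tokenizer = AutoTokenizer.from_pretrained("airKlizz/mt5-base-wikinewssum-polish")
model = AutoModelForSeq2SeqLM.from_pretrained("airKlizz/mt5-base-wikinewssum-polish")

text = "..."  # a Polish news article (placeholder)
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)
summary_ids = model.generate(**inputs, num_beams=4, max_length=128)
print(tokenizer.decode(summary_ids[0], skip_special_tokens=True))
```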
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 315 | 2.5391 | 5.9874 | 2.3594 | 5.1303 | 5.6116 |
| No log | 2.0 | 630 | 2.4446 | 7.7294 | 3.0152 | 6.6024 | 7.2757 |
| No log | 3.0 | 945 | 2.3912 | 7.6451 | 2.9785 | 6.5714 | 7.2011 |
| 3.5311 | 4.0 | 1260 | 2.3720 | 7.8007 | 3.0913 | 6.7067 | 7.3451 |
| 3.5311 | 5.0 | 1575 | 2.3411 | 7.8374 | 3.1208 | 6.7288 | 7.3459 |
| 3.5311 | 6.0 | 1890 | 2.3354 | 7.8664 | 3.1655 | 6.762 | 7.4364 |
| 3.5311 | 7.0 | 2205 | 2.3175 | 7.9529 | 3.2225 | 6.8438 | 7.4904 |
| 2.692 | 8.0 | 2520 | 2.3179 | 7.911 | 3.2189 | 6.7856 | 7.4485 |
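The Rouge columns above are the kind of scores produced by the `rouge` metric from the `datasets` library (backed by `rouge_score`); a minimal sketch is shown below, using placeholder strings rather than the actual evaluation data.

```python
from datasets import load_metric

# Sketch only: how ROUGE scores like those in the table are typically computed.
rouge = load_metric("rouge")
scores = rouge.compute(
    predictions=["generated summary"],  # placeholder
    references=["reference summary"],   # placeholder
)
print(scores["rouge1"].mid.fmeasure)
```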
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-polish", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-polish
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-polish
===========================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.3179
* Rouge1: 7.911
* Rouge2: 3.2189
* Rougel: 6.7856
* Rougelsum: 7.4485
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-portuguese
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.0428
- Rouge1: 9.4966
- Rouge2: 4.2224
- Rougel: 7.9845
- Rougelsum: 8.8641
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 334 | 2.2258 | 7.3686 | 2.9066 | 6.3167 | 6.8758 |
| No log | 2.0 | 668 | 2.1389 | 9.0551 | 3.8395 | 7.6578 | 8.4641 |
| No log | 3.0 | 1002 | 2.1030 | 9.2792 | 3.9352 | 7.8259 | 8.663 |
| No log | 4.0 | 1336 | 2.0841 | 9.337 | 4.0647 | 7.8662 | 8.693 |
| 3.2831 | 5.0 | 1670 | 2.0487 | 9.4244 | 4.0821 | 7.8633 | 8.7111 |
| 3.2831 | 6.0 | 2004 | 2.0580 | 9.4598 | 4.1598 | 7.9511 | 8.8299 |
| 3.2831 | 7.0 | 2338 | 2.0426 | 9.501 | 4.1885 | 7.9803 | 8.8612 |
| 3.2831 | 8.0 | 2672 | 2.0428 | 9.4966 | 4.2224 | 7.9845 | 8.8641 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
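### Usage example
A minimal inference sketch, assuming the standard `transformers` summarization pipeline; the checkpoint id matches this repository, while the input text and generation settings are illustrative placeholders rather than values from the original card.
```python
from transformers import pipeline

# Load the fine-tuned checkpoint through the summarization pipeline.
# max_length/min_length below are illustrative defaults, not tuned values.
summarizer = pipeline(
    "summarization",
    model="airKlizz/mt5-base-wikinewssum-portuguese",
    tokenizer="airKlizz/mt5-base-wikinewssum-portuguese",
)

article = "..."  # placeholder for a Portuguese news article
summary = summarizer(article, max_length=128, min_length=16, do_sample=False)
print(summary[0]["summary_text"])
```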
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-portuguese", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-portuguese
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-portuguese
===============================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.0428
* Rouge1: 9.4966
* Rouge2: 4.2224
* Rougel: 7.9845
* Rougelsum: 8.8641
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-base-wikinewssum-spanish
This model is a fine-tuned version of [google/mt5-base](https://huggingface.co/google/mt5-base) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.2394
- Rouge1: 7.9732
- Rouge2: 3.5041
- Rougel: 6.6713
- Rougelsum: 7.5229
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 4
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 8
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 528 | 2.3707 | 6.687 | 2.9169 | 5.6793 | 6.2978 |
| No log | 2.0 | 1056 | 2.3140 | 7.9518 | 3.4529 | 6.7265 | 7.4984 |
| No log | 3.0 | 1584 | 2.2848 | 7.9708 | 3.5344 | 6.7272 | 7.534 |
| No log | 4.0 | 2112 | 2.2668 | 8.0252 | 3.5323 | 6.7319 | 7.5819 |
| 3.2944 | 5.0 | 2640 | 2.2532 | 8.0143 | 3.534 | 6.7155 | 7.582 |
| 3.2944 | 6.0 | 3168 | 2.2399 | 7.9525 | 3.4849 | 6.6716 | 7.5155 |
| 3.2944 | 7.0 | 3696 | 2.2376 | 7.9405 | 3.4661 | 6.6559 | 7.5043 |
| 3.2944 | 8.0 | 4224 | 2.2394 | 7.9732 | 3.5041 | 6.6713 | 7.5229 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
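### Usage example
A minimal inference sketch using the tokenizer and model classes directly, assuming the checkpoint id shown in this repository; the beam-search settings are illustrative and were not taken from the original evaluation setup.
```python
import torch
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM

# Load the fine-tuned Spanish summarization checkpoint.
model_name = "airKlizz/mt5-base-wikinewssum-spanish"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSeq2SeqLM.from_pretrained(model_name)

article = "..."  # placeholder for a Spanish news article
inputs = tokenizer(article, return_tensors="pt", truncation=True, max_length=512)

# Generate a summary with illustrative beam-search settings.
with torch.no_grad():
    output_ids = model.generate(**inputs, num_beams=4, max_length=128)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```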
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-base-wikinewssum-spanish", "results": []}]}
|
summarization
|
airKlizz/mt5-base-wikinewssum-spanish
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-base-wikinewssum-spanish
============================
This model is a fine-tuned version of google/mt5-base on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.2394
* Rouge1: 7.9732
* Rouge2: 3.5041
* Rougel: 6.6713
* Rougelsum: 7.5229
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 4
* eval\_batch\_size: 4
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 8
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
127,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 4\n* eval\\_batch\\_size: 4\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 8\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.11205639690160751,
0.1200854480266571,
-0.002607157453894615,
0.09516560286283493,
0.12879973649978638,
0.002303670160472393,
0.11824613064527512,
0.171413391828537,
-0.14953720569610596,
0.06951165944337845,
0.14009463787078857,
0.14367541670799255,
0.0381598025560379,
0.19781967997550964,
-0.06012033671140671,
-0.25871190428733826,
0.022363876923918724,
0.01624239981174469,
-0.028094766661524773,
0.14088991284370422,
0.09551103413105011,
-0.1287730485200882,
0.07820693403482437,
0.000998921343125403,
-0.183598592877388,
-0.024697372689843178,
-0.02796112187206745,
-0.06271753460168839,
0.12303527444601059,
0.00036430644104257226,
0.06788218021392822,
0.03774084895849228,
0.06467381119728088,
-0.18728700280189514,
0.001241724705323577,
0.060552459210157394,
0.01991002820432186,
0.10001540929079056,
0.056589141488075256,
-0.031313855201005936,
0.13133859634399414,
-0.08424826711416245,
0.05489073321223259,
0.025151334702968597,
-0.12808622419834137,
-0.26126325130462646,
-0.10970255732536316,
0.031086960807442665,
0.09551467001438141,
0.08334353566169739,
-0.019286882132291794,
0.15294815599918365,
-0.06488027423620224,
0.10504907369613647,
0.27263572812080383,
-0.3033342659473419,
-0.05171016976237297,
0.045595161616802216,
0.013228163123130798,
0.060377441346645355,
-0.09293504059314728,
-0.02399926446378231,
0.05413489788770676,
0.045160096138715744,
0.1429183930158615,
-0.0027004037983715534,
-0.02511623315513134,
-0.0019087126711383462,
-0.14250117540359497,
-0.07364105433225632,
0.17931704223155975,
0.03428516909480095,
-0.03383367881178856,
-0.09517841786146164,
-0.0675944834947586,
-0.20747029781341553,
-0.028198545798659325,
0.014058619737625122,
0.03909992054104805,
-0.041674401611089706,
-0.09935754537582397,
0.03741353750228882,
-0.07454674690961838,
-0.039075493812561035,
-0.018075205385684967,
0.06490866839885712,
0.04652085900306702,
0.01755381189286709,
-0.03178589418530464,
0.09724763035774231,
-0.03455623239278793,
-0.16132846474647522,
0.0037479298189282417,
0.00593946548178792,
-0.00808199867606163,
-0.03548746928572655,
-0.03998066857457161,
-0.07684570550918579,
0.014109679497778416,
0.1381710320711136,
-0.09364449977874756,
0.06668062508106232,
-0.006666583940386772,
0.03617323935031891,
-0.04165424033999443,
0.14300018548965454,
-0.031221669167280197,
-0.039196353405714035,
-0.012784029357135296,
0.07528222352266312,
0.04381590336561203,
-0.02753317356109619,
-0.10624876618385315,
0.057326097041368484,
0.10818855464458466,
0.04058639705181122,
-0.036746468394994736,
0.054511938244104385,
-0.05165879800915718,
-0.02911781147122383,
0.0272970050573349,
-0.10658147186040878,
0.03305680677294731,
-0.005727611947804689,
-0.08889719843864441,
-0.04051333665847778,
-0.012938112020492554,
0.003390922211110592,
-0.04460423067212105,
0.11670494824647903,
-0.08160168677568436,
0.023699680343270302,
-0.0841912105679512,
-0.12989528477191925,
0.028230013325810432,
-0.10438722372055054,
-0.008960140869021416,
-0.06941433995962143,
-0.15547049045562744,
-0.02269802801311016,
0.06120498105883598,
-0.06857271492481232,
-0.06946265697479248,
-0.07487965375185013,
-0.08376320451498032,
0.042465370148420334,
-0.02433229237794876,
0.138554647564888,
-0.08122630417346954,
0.09492399543523788,
0.03257113695144653,
0.08185537159442902,
0.01219625398516655,
0.053119316697120667,
-0.09459764510393143,
0.03232480213046074,
-0.22062593698501587,
0.0760658010840416,
-0.044534832239151,
0.0795859545469284,
-0.11665266752243042,
-0.11618484556674957,
0.028773363679647446,
-0.03539174422621727,
0.09214713424444199,
0.1268407702445984,
-0.1912430226802826,
-0.07374312728643417,
0.20106762647628784,
-0.0881706178188324,
-0.12733972072601318,
0.12914006412029266,
-0.05568486452102661,
0.020963672548532486,
0.04997303709387779,
0.22030377388000488,
0.0132258590310812,
-0.06619121134281158,
-0.020783480256795883,
-0.04689343646168709,
0.07744970172643661,
-0.052902836352586746,
0.08229874074459076,
-0.007337320130318403,
0.08715333789587021,
0.014347528107464314,
0.03443807363510132,
0.022578751668334007,
-0.11143284291028976,
-0.07900378108024597,
-0.03549855202436447,
-0.07241262495517731,
0.012825163081288338,
0.049185190349817276,
0.07837684452533722,
-0.14081326127052307,
-0.08774495869874954,
0.008908026851713657,
0.0861697569489479,
-0.08352768421173096,
0.04624487832188606,
-0.06909306347370148,
0.11099889129400253,
-0.06278719753026962,
-0.00034085329389199615,
-0.19382749497890472,
0.014749595895409584,
0.03944957256317139,
0.019187571480870247,
-0.008854692801833153,
-0.03225889801979065,
0.05644776672124863,
0.0672122910618782,
-0.05915208160877228,
-0.02305278740823269,
-0.013876810669898987,
-0.00945417582988739,
-0.12107262760400772,
-0.23638217151165009,
-0.04785185679793358,
-0.04435846954584122,
0.10498365014791489,
-0.176314115524292,
0.04025633633136749,
0.05055408179759979,
0.11252106726169586,
0.023772336542606354,
-0.03973634913563728,
-0.00460545951500535,
0.06906706839799881,
-0.061362750828266144,
-0.06819313019514084,
0.06561906635761261,
0.017099659889936447,
-0.0800761878490448,
-0.00904841162264347,
-0.11209828406572342,
0.13416273891925812,
0.14215566217899323,
-0.014379914849996567,
-0.07797959446907043,
-0.0400732047855854,
-0.07193023711442947,
-0.030359629541635513,
-0.04994754120707512,
0.033291544765233994,
0.13199256360530853,
0.023203253746032715,
0.15598061680793762,
-0.09839808195829391,
-0.06289047002792358,
0.057420846074819565,
-0.017126668244600296,
0.006391224917024374,
0.1314973682165146,
0.06009002402424812,
-0.12431710213422775,
0.14869509637355804,
0.12587502598762512,
-0.014233122579753399,
0.1185302883386612,
-0.061093661934137344,
-0.07847945392131805,
-0.02777388133108616,
-0.005566457752138376,
0.01030904520303011,
0.10947950929403305,
-0.14417177438735962,
-0.031061502173542976,
0.03999796882271767,
0.05617692694067955,
0.01567836105823517,
-0.17570212483406067,
-0.007124630268663168,
0.03344719111919403,
-0.04787316545844078,
-0.04560607671737671,
-0.02284228429198265,
0.004227818455547094,
0.11800038069486618,
0.005832357332110405,
-0.06198959797620773,
0.020752903074026108,
0.0002620290615595877,
-0.0718337893486023,
0.19234828650951385,
-0.09336501359939575,
-0.15773937106132507,
-0.07881972938776016,
-0.0991009771823883,
-0.06453730165958405,
-0.00814135279506445,
0.08371137827634811,
-0.09895356744527817,
-0.03233583644032478,
-0.10093409568071365,
0.002524662995710969,
-0.029991062358021736,
0.03168123960494995,
0.043324198573827744,
0.012962352484464645,
0.05045382305979729,
-0.11362975090742111,
-0.020125364884734154,
-0.040214478969573975,
0.0008918482926674187,
0.0750204399228096,
0.01599774695932865,
0.10105552524328232,
0.13922399282455444,
-0.011240120977163315,
0.05168319493532181,
-0.04848644137382507,
0.20207765698432922,
-0.06343686580657959,
-0.03139430284500122,
0.10303834080696106,
0.011015428230166435,
0.07387688010931015,
0.1355835348367691,
0.044918399304151535,
-0.10764198005199432,
0.009790534153580666,
0.018470797687768936,
-0.05205294489860535,
-0.22517366707324982,
-0.0382547602057457,
-0.05631903186440468,
0.029499458149075508,
0.12404964864253998,
0.026718202978372574,
-0.005984536837786436,
0.03752150386571884,
0.01822456158697605,
0.032422635704278946,
-0.01615438610315323,
0.09237942844629288,
0.07464220374822617,
0.036158639937639236,
0.14656466245651245,
-0.04628540948033333,
-0.019978981465101242,
0.050288423895835876,
-0.01718146912753582,
0.24636387825012207,
-0.034528475254774094,
0.1597570776939392,
0.05584326386451721,
0.16479536890983582,
0.009025229141116142,
0.08763927966356277,
-0.02134997770190239,
-0.009810500778257847,
-0.028991280123591423,
-0.05003475770354271,
-0.04527236893773079,
0.028167368844151497,
-0.033007148653268814,
0.04371321573853493,
-0.14233914017677307,
0.03516094759106636,
0.0854090228676796,
0.32209962606430054,
0.085199736058712,
-0.35187843441963196,
-0.09759797155857086,
0.008699295111000538,
-0.039517972618341446,
-0.023187648504972458,
0.020709242671728134,
0.11779837310314178,
-0.08545199036598206,
0.07921327650547028,
-0.06322813034057617,
0.10837214440107346,
-0.05136217921972275,
0.036786407232284546,
0.0473417304456234,
0.06066565588116646,
-0.013728477992117405,
0.07209303975105286,
-0.30708518624305725,
0.28899139165878296,
0.018392030149698257,
0.05957576259970665,
-0.0923774391412735,
0.017143648117780685,
0.0026850122958421707,
0.024159053340554237,
0.07960312813520432,
-0.0039656939916312695,
-0.12163063883781433,
-0.143130823969841,
-0.11210592091083527,
0.008798196911811829,
0.09584887325763702,
-0.02732231840491295,
0.10918975621461868,
-0.0005942208808846772,
0.0024724307004362345,
0.03448329493403435,
-0.012539991177618504,
-0.026949169114232063,
-0.1067408099770546,
0.01221197284758091,
0.00874137319624424,
-0.03764903545379639,
-0.06522967666387558,
-0.11124996840953827,
-0.06485360860824585,
0.21355371177196503,
0.03389192372560501,
-0.06171393021941185,
-0.12091884016990662,
0.10070730000734329,
0.08661431074142456,
-0.07510244101285934,
0.025908103212714195,
0.0029307075310498476,
0.09597276896238327,
-0.00506886001676321,
-0.05563337728381157,
0.12162170559167862,
-0.056817762553691864,
-0.1918409764766693,
-0.06492508947849274,
0.1379907876253128,
0.020831160247325897,
0.07119887322187424,
-0.0195109061896801,
0.04011273756623268,
-0.015777073800563812,
-0.0810445100069046,
0.052636027336120605,
-0.004894413985311985,
0.14381149411201477,
-0.0034972357098013163,
-0.023933257907629013,
0.028880678117275238,
-0.06759252399206161,
-0.058457907289266586,
0.20209334790706635,
0.2886066734790802,
-0.1014329269528389,
0.07176490128040314,
0.04254549741744995,
-0.05271827429533005,
-0.15753675997257233,
0.02230987511575222,
0.059829145669937134,
0.007197357714176178,
0.00971455778926611,
-0.17786499857902527,
0.03637805953621864,
0.10226704180240631,
-0.017067179083824158,
0.07126431912183762,
-0.3531765043735504,
-0.12112689018249512,
0.06849798560142517,
0.1042497530579567,
0.07964833080768585,
-0.16004104912281036,
-0.037219274789094925,
-0.0217670276761055,
-0.11253982782363892,
0.10693099349737167,
-0.09691113978624344,
0.12483107298612595,
-0.029006067663431168,
0.06675292551517487,
0.012483968399465084,
-0.0716375783085823,
0.11419954895973206,
0.028830980882048607,
0.07685384154319763,
-0.05723283067345619,
0.015192286111414433,
0.09579186141490936,
-0.07801118493080139,
0.03801945596933365,
-0.10100790858268738,
0.0395209901034832,
-0.12121538817882538,
-0.006478775292634964,
-0.06998132914304733,
0.009828716516494751,
-0.04250891134142876,
-0.05700402334332466,
-0.05117117986083031,
0.027468670159578323,
0.08668804913759232,
-0.01997712254524231,
0.17492634057998657,
0.014194699935615063,
0.1586436927318573,
0.1694963425397873,
0.060269687324762344,
-0.12699128687381744,
-0.041073545813560486,
0.0008937517995946109,
-0.014475439675152302,
0.029230155050754547,
-0.18474577367305756,
0.03402821719646454,
0.1450379639863968,
0.024433722719550133,
0.13561265170574188,
0.07501205056905746,
-0.050406601279973984,
0.028158608824014664,
0.06194161996245384,
-0.16201508045196533,
-0.08362826704978943,
0.019016984850168228,
0.015313887037336826,
-0.1245117112994194,
0.05582323670387268,
0.09873337298631668,
-0.05105185881257057,
-0.01937897317111492,
-0.008666092529892921,
0.04667884483933449,
-0.026434067636728287,
0.21319027245044708,
0.0015973870176821947,
0.08558833599090576,
-0.12406037747859955,
0.10168445110321045,
0.06127422675490379,
-0.12516702711582184,
0.04600324109196663,
0.09664556384086609,
-0.10063496232032776,
-0.025734778493642807,
0.0564645417034626,
0.143109530210495,
-0.007795474026352167,
-0.03123391419649124,
-0.15345261991024017,
-0.12555287778377533,
0.1070227324962616,
0.11124735325574875,
0.08447909355163574,
0.022966429591178894,
-0.03172243759036064,
-0.021852592006325722,
-0.1395566314458847,
0.0996365025639534,
0.0713956207036972,
0.05787466838955879,
-0.1184096559882164,
0.12900805473327637,
-0.0029617438558489084,
0.03932579606771469,
-0.010611096397042274,
0.008911686018109322,
-0.11015573143959045,
0.020352834835648537,
-0.09383316338062286,
-0.017769496887922287,
-0.06696808338165283,
-0.011882016435265541,
-0.024039380252361298,
-0.016373133286833763,
-0.056233637034893036,
0.012460472993552685,
-0.11871087551116943,
-0.038187891244888306,
-0.001826642663218081,
0.038225479423999786,
-0.11793367564678192,
-0.021984172984957695,
-0.0005945826414972544,
-0.09057524800300598,
0.10011273622512817,
0.05925333499908447,
-0.0009865729371085763,
0.009503116831183434,
-0.02850765362381935,
-0.008363250643014908,
0.06886249035596848,
-0.0055760652758181095,
0.07980187237262726,
-0.13043776154518127,
-0.016374174505472183,
0.009114005602896214,
0.00978186447173357,
0.033877402544021606,
0.08168154209852219,
-0.13570496439933777,
0.006359047722071409,
-0.01543837133795023,
-0.0704135149717331,
-0.05560191348195076,
0.0575764998793602,
0.08301401883363724,
0.021680660545825958,
0.18087723851203918,
-0.07383621484041214,
0.04131067916750908,
-0.21257226169109344,
-0.012014258652925491,
0.0018502091988921165,
-0.1238124668598175,
-0.07574303448200226,
-0.027593931183218956,
0.07831709831953049,
-0.07784182578325272,
0.14541946351528168,
0.014750583097338676,
0.012970727868378162,
0.041576139628887177,
-0.05441097542643547,
-0.022552259266376495,
0.021100156009197235,
0.1664745807647705,
0.01539525855332613,
-0.041574906557798386,
0.08540167659521103,
0.04916341230273247,
0.10373528301715851,
0.15661627054214478,
0.21478168666362762,
0.12472513318061829,
0.09403238445520401,
0.08199582248926163,
0.013090026564896107,
-0.08310051262378693,
-0.1886410117149353,
0.1010751873254776,
-0.020964697003364563,
0.15339131653308868,
-0.00940002128481865,
0.15566739439964294,
0.11629936099052429,
-0.18345552682876587,
0.053638678044080734,
-0.062108367681503296,
-0.08301382511854172,
-0.11057336628437042,
-0.06543656438589096,
-0.08658221364021301,
-0.1667580008506775,
-0.004923108033835888,
-0.12870657444000244,
0.038584474474191666,
0.08083992451429367,
0.030919775366783142,
0.008959905244410038,
0.08363289386034012,
0.023197825998067856,
0.02281271666288376,
0.06618763506412506,
0.006118490360677242,
-0.03714447095990181,
-0.05541076511144638,
-0.05820423364639282,
0.009513909928500652,
-0.013712036423385143,
0.0646275207400322,
-0.017094522714614868,
-0.03266702964901924,
0.049033042043447495,
-0.019696732982993126,
-0.10339327901601791,
0.010534671135246754,
0.014843210577964783,
0.077741838991642,
0.06764236092567444,
0.02115391194820404,
-0.008487651124596596,
-0.01827094331383705,
0.21911625564098358,
-0.08267181366682053,
-0.05574587732553482,
-0.12404626607894897,
0.27340221405029297,
0.040739960968494415,
-0.03453099727630615,
0.04053065553307533,
-0.07545653730630875,
-0.020516909658908844,
0.17087097465991974,
0.23462322354316711,
-0.026195652782917023,
-0.02122514322400093,
-0.006915074773132801,
-0.012536261230707169,
-0.014906550757586956,
0.07918783277273178,
0.14076878130435944,
0.03575282171368599,
-0.08514099568128586,
-0.011057380586862564,
-0.062316715717315674,
-0.033721376210451126,
-0.03900812938809395,
0.08805117011070251,
0.03334086015820503,
0.009187313728034496,
-0.03357794135808945,
0.06043779104948044,
-0.05192376673221588,
-0.07097984850406647,
0.026212621480226517,
-0.23813489079475403,
-0.1741388887166977,
0.012991872616112232,
0.062448665499687195,
0.005783857777714729,
0.0733928233385086,
-0.006816425826400518,
0.002218420384451747,
0.07052018493413925,
-0.019468894228339195,
-0.07480581849813461,
-0.11086127161979675,
0.10008812695741653,
-0.15018858015537262,
0.1847400814294815,
-0.04338105767965317,
0.02458036318421364,
0.14340759813785553,
0.04774262756109238,
-0.1081097424030304,
0.0418110154569149,
0.05090409889817238,
-0.02254590205848217,
0.011134220287203789,
0.12826618552207947,
-0.038520123809576035,
0.08810807764530182,
0.05422309786081314,
-0.11524895578622818,
-0.006634072866290808,
-0.08708541840314865,
-0.022465838119387627,
-0.03393132612109184,
-0.03512055426836014,
-0.029881775379180908,
0.1268550455570221,
0.209943026304245,
-0.05260109528899193,
0.012102355249226093,
-0.06767775863409042,
0.014113033190369606,
0.04032096266746521,
0.006463134661316872,
-0.05424747243523598,
-0.2874477803707123,
0.017394447699189186,
0.09424180537462234,
-0.0019257799722254276,
-0.2589356005191803,
-0.08441072702407837,
0.026616526767611504,
-0.0592057965695858,
-0.11457079648971558,
0.09700646251440048,
0.056220125406980515,
0.04969387501478195,
-0.0756903812289238,
-0.052557267248630524,
-0.06885884702205658,
0.1691865622997284,
-0.14244388043880463,
-0.07906962931156158
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# mt5-small-wikinewssum-test
This model is a fine-tuned version of [google/mt5-small](https://huggingface.co/google/mt5-small) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 2.9354
- Rouge1: 6.8433
- Rouge2: 2.5498
- Rougel: 5.6114
- Rougelsum: 6.353
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 5.6e-05
- train_batch_size: 12
- eval_batch_size: 12
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 8
### Training results
| Training Loss | Epoch | Step | Validation Loss | Rouge1 | Rouge2 | Rougel | Rougelsum |
|:-------------:|:-----:|:----:|:---------------:|:------:|:------:|:------:|:---------:|
| No log | 1.0 | 661 | 3.2810 | 6.4161 | 2.403 | 5.3674 | 6.0329 |
| No log | 2.0 | 1322 | 3.1515 | 6.9291 | 2.6826 | 5.6839 | 6.4359 |
| No log | 3.0 | 1983 | 3.0565 | 6.7939 | 2.6113 | 5.6133 | 6.3126 |
| No log | 4.0 | 2644 | 2.9815 | 6.0279 | 2.1637 | 4.9892 | 5.5962 |
| No log | 5.0 | 3305 | 2.9645 | 6.3926 | 2.339 | 5.2716 | 5.9443 |
| 3.9937 | 6.0 | 3966 | 2.9476 | 6.4739 | 2.3615 | 5.3473 | 6.0089 |
| 3.9937 | 7.0 | 4627 | 2.9405 | 6.615 | 2.4309 | 5.4493 | 6.1445 |
| 3.9937 | 8.0 | 5288 | 2.9354 | 6.8433 | 2.5498 | 5.6114 | 6.353 |
### Framework versions
- Transformers 4.13.0
- Pytorch 1.10.1
- Datasets 1.16.1
- Tokenizers 0.10.3
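### Reproducing the training setup (sketch)
The snippet below sketches training arguments that mirror the hyperparameters listed above; the output directory, evaluation schedule, and generation flag are assumptions and were not recorded in the original run.
```python
from transformers import Seq2SeqTrainingArguments

# Training arguments matching the hyperparameters listed in this card.
# Adam betas/epsilon are the library defaults (0.9, 0.999, 1e-08), as stated above.
training_args = Seq2SeqTrainingArguments(
    output_dir="mt5-small-wikinewssum-test",  # hypothetical output path
    learning_rate=5.6e-5,
    per_device_train_batch_size=12,
    per_device_eval_batch_size=12,
    seed=42,
    num_train_epochs=8,
    lr_scheduler_type="linear",
    evaluation_strategy="epoch",   # assumed; the card does not state the evaluation schedule
    predict_with_generate=True,    # assumed, so ROUGE can be computed during evaluation
)
```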
|
{"license": "apache-2.0", "tags": ["summarization", "generated_from_trainer"], "metrics": ["rouge"], "model-index": [{"name": "mt5-small-wikinewssum-test", "results": []}]}
|
summarization
|
airKlizz/mt5-small-wikinewssum-test
|
[
"transformers",
"pytorch",
"mt5",
"text2text-generation",
"summarization",
"generated_from_trainer",
"license:apache-2.0",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
mt5-small-wikinewssum-test
==========================
This model is a fine-tuned version of google/mt5-small on an unknown dataset.
It achieves the following results on the evaluation set:
* Loss: 2.9354
* Rouge1: 6.8433
* Rouge2: 2.5498
* Rougel: 5.6114
* Rougelsum: 6.353
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 5.6e-05
* train\_batch\_size: 12
* eval\_batch\_size: 12
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* num\_epochs: 8
### Training results
### Framework versions
* Transformers 4.13.0
* Pytorch 1.10.1
* Datasets 1.16.1
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
68,
99,
4,
30
] |
[
"passage: TAGS\n#transformers #pytorch #mt5 #text2text-generation #summarization #generated_from_trainer #license-apache-2.0 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 5.6e-05\n* train\\_batch\\_size: 12\n* eval\\_batch\\_size: 12\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* num\\_epochs: 8### Training results### Framework versions\n\n\n* Transformers 4.13.0\n* Pytorch 1.10.1\n* Datasets 1.16.1\n* Tokenizers 0.10.3"
] |
[
-0.07920566946268082,
0.04815811291337013,
-0.0030167566146701574,
0.1113176941871643,
0.15733343362808228,
0.023622360080480576,
0.12489539384841919,
0.12160741537809372,
-0.10935614258050919,
0.021317528560757637,
0.12848234176635742,
0.15419958531856537,
0.019906306639313698,
0.11933234333992004,
-0.05702918767929077,
-0.2633913457393646,
0.00579065503552556,
0.029634451493620872,
-0.03047148510813713,
0.14520622789859772,
0.10337115824222565,
-0.11878964304924011,
0.09041735529899597,
-0.0066857426427304745,
-0.17965136468410492,
0.002603648230433464,
-0.013032259419560432,
-0.054323967546224594,
0.1533244252204895,
0.023058734834194183,
0.09432505071163177,
-0.0028189991135150194,
0.07107240706682205,
-0.20792731642723083,
0.006521133240312338,
0.04150191321969032,
0.010825274512171745,
0.08069253712892532,
0.04091610759496689,
0.013323213905096054,
0.17131462693214417,
-0.05866534635424614,
0.045781154185533524,
0.02231113240122795,
-0.12664730846881866,
-0.19730953872203827,
-0.07631833851337433,
0.020685525611042976,
0.09141670167446136,
0.11345428228378296,
-0.01346673909574747,
0.11895771324634552,
-0.0906224250793457,
0.0995216965675354,
0.2271619737148285,
-0.28490057587623596,
-0.062051922082901,
0.029698919504880905,
0.027270261198282242,
0.08025968074798584,
-0.08945216983556747,
-0.027600141242146492,
0.04534094035625458,
0.05888352915644646,
0.13348573446273804,
-0.024624688550829887,
-0.10450121760368347,
0.008016129955649376,
-0.14915500581264496,
-0.06736092269420624,
0.1960822194814682,
0.04052884876728058,
-0.024694569408893585,
-0.058400869369506836,
-0.07364863157272339,
-0.1561138778924942,
-0.028487710282206535,
-0.0034991446882486343,
0.042071592062711716,
-0.016768459230661392,
-0.04786938056349754,
-0.0019534225575625896,
-0.10327614098787308,
-0.06538999825716019,
-0.06805670261383057,
0.11389225721359253,
0.04152914509177208,
0.011243697255849838,
-0.04203042387962341,
0.1076345294713974,
-0.01502564363181591,
-0.12993252277374268,
0.012379933148622513,
0.01964212767779827,
0.03175249323248863,
-0.028918836265802383,
-0.07457426935434341,
-0.06368279457092285,
0.015083865262567997,
0.13261985778808594,
-0.0582699328660965,
0.05048692226409912,
0.033073555678129196,
0.04868176206946373,
-0.08516837656497955,
0.16180166602134705,
-0.007832310162484646,
-0.030706843361258507,
-0.0030313096940517426,
0.05612616613507271,
0.03530086204409599,
-0.012892725877463818,
-0.11677990853786469,
0.01887878030538559,
0.09441515803337097,
0.024240195751190186,
-0.0721413642168045,
0.07866761088371277,
-0.04668061062693596,
-0.02076195739209652,
-0.012783238664269447,
-0.09320114552974701,
0.00868748128414154,
-0.008429137989878654,
-0.07968831807374954,
-0.0230594202876091,
0.022450009360909462,
0.017720920965075493,
-0.043278034776449203,
0.10791925340890884,
-0.08073427528142929,
0.021620815619826317,
-0.09104623645544052,
-0.10156998038291931,
0.015290210023522377,
-0.06919944286346436,
0.021587874740362167,
-0.10145170241594315,
-0.2107991874217987,
-0.00562526797875762,
0.05728820711374283,
-0.026453033089637756,
-0.06613034754991531,
-0.06511145085096359,
-0.07521001994609833,
0.018468720838427544,
-0.024479003623127937,
0.14480887353420258,
-0.07548129558563232,
0.10162151604890823,
0.037599775940179825,
0.06400652974843979,
-0.04462681710720062,
0.05965031683444977,
-0.11041068285703659,
0.0062795779667794704,
-0.15322190523147583,
0.0558212585747242,
-0.034833576530218124,
0.06894810497760773,
-0.08398181945085526,
-0.10534534603357315,
-0.007889306172728539,
-0.00428891833871603,
0.07012777030467987,
0.10961316525936127,
-0.1589307337999344,
-0.07678285241127014,
0.174253910779953,
-0.07512936741113663,
-0.13734714686870575,
0.13044793903827667,
-0.056375857442617416,
0.057576123625040054,
0.0685167983174324,
0.18601776659488678,
0.04542246833443642,
-0.07097247242927551,
0.021454431116580963,
0.00988113135099411,
0.059806618839502335,
-0.05032641440629959,
0.07333065569400787,
0.006367813795804977,
0.014357426203787327,
0.037361059337854385,
-0.004871551413089037,
0.062359753996133804,
-0.09602884203195572,
-0.09004003554582596,
-0.0455462746322155,
-0.0857900083065033,
0.02643243968486786,
0.05575184151530266,
0.07703686505556107,
-0.1099681407213211,
-0.06931612640619278,
0.02070281282067299,
0.07921430468559265,
-0.06255657970905304,
0.033371783792972565,
-0.04780769348144531,
0.07056280970573425,
-0.02111763320863247,
0.0015149222454056144,
-0.1831519454717636,
-0.001423297799192369,
0.017902027815580368,
0.015112138353288174,
0.015340976417064667,
-0.002477056346833706,
0.05721175670623779,
0.06193539500236511,
-0.05665343254804611,
-0.013662847690284252,
-0.03758619725704193,
-0.010117691941559315,
-0.13105528056621552,
-0.1759289652109146,
-0.02843862771987915,
-0.012496928684413433,
0.13161301612854004,
-0.19134756922721863,
0.049771782010793686,
-0.017607741057872772,
0.06623104959726334,
-0.002750494284555316,
-0.009181132540106773,
-0.04579232260584831,
0.07439934462308884,
-0.0534222275018692,
-0.03991397097706795,
0.08082782477140427,
0.01438167691230774,
-0.08929024636745453,
-0.039211828261613846,
-0.10984364151954651,
0.1620793491601944,
0.1384364515542984,
-0.12953276932239532,
-0.059349190443754196,
-0.015664581209421158,
-0.06228398531675339,
-0.03187147527933121,
-0.04663305729627609,
0.007415636908262968,
0.17901872098445892,
-0.009839342907071114,
0.15648411214351654,
-0.08299306035041809,
-0.04018540307879448,
0.021482044830918312,
-0.03183139115571976,
0.03810819238424301,
0.1262798309326172,
0.1267043650150299,
-0.09215499460697174,
0.14731326699256897,
0.17004293203353882,
-0.06859951466321945,
0.13117200136184692,
-0.049572259187698364,
-0.07075691968202591,
-0.0059329308569431305,
-0.015463999472558498,
-0.020813854411244392,
0.06400421261787415,
-0.16901153326034546,
0.00035178440157324076,
0.03105265460908413,
0.04437200725078583,
0.029920706525444984,
-0.21641427278518677,
-0.03051435761153698,
0.03968608006834984,
-0.04849621653556824,
-0.03220834583044052,
-0.007929467596113682,
0.0034002254251390696,
0.10714319348335266,
-0.0025281962007284164,
-0.08489087969064713,
0.035765230655670166,
-0.002715642563998699,
-0.08171629160642624,
0.2100120484828949,
-0.0834992378950119,
-0.16721422970294952,
-0.12608225643634796,
-0.08307340741157532,
-0.05990710109472275,
0.010883730836212635,
0.09085867553949356,
-0.08035074174404144,
-0.028075063601136208,
-0.08821536600589752,
0.04611527547240257,
-0.013857986778020859,
0.020000863820314407,
0.02346476912498474,
0.0021308346185833216,
0.049560707062482834,
-0.11375060677528381,
-0.02225341647863388,
-0.05254510045051575,
-0.05566015839576721,
0.04943384602665901,
0.012353179045021534,
0.10700348764657974,
0.13797399401664734,
-0.008472261019051075,
0.011699321679770947,
-0.03589580953121185,
0.252043753862381,
-0.06018863245844841,
-0.03220376744866371,
0.15709896385669708,
0.001028467551805079,
0.05931122228503227,
0.10614151507616043,
0.05680156126618385,
-0.09974116832017899,
0.01932774856686592,
0.019237343221902847,
-0.046413812786340714,
-0.2286800891160965,
-0.05222419276833534,
-0.06782829761505127,
0.011129737831652164,
0.09263201057910919,
0.02465672418475151,
0.026887748390436172,
0.06213056296110153,
0.025896133854985237,
0.0761856660246849,
-0.013035831041634083,
0.06569648534059525,
0.1528611034154892,
0.036605868488550186,
0.1334397941827774,
-0.043793559074401855,
-0.05772002041339874,
0.04424544423818588,
-0.015612924471497536,
0.23669229447841644,
0.012068324722349644,
0.13018815219402313,
0.061027705669403076,
0.14896099269390106,
-0.013166993856430054,
0.09394533932209015,
-0.010370897129178047,
-0.027332771569490433,
-0.03820374980568886,
-0.04019199311733246,
-0.0636889711022377,
0.027946200221776962,
-0.08165086805820465,
0.05952690914273262,
-0.13025429844856262,
0.007774559780955315,
0.06405643373727798,
0.27379941940307617,
0.02669568918645382,
-0.32385510206222534,
-0.0977957621216774,
0.014210266061127186,
-0.05835750326514244,
-0.01644592545926571,
0.051961131393909454,
0.0931679829955101,
-0.0959831103682518,
0.04063606262207031,
-0.053606487810611725,
0.11115845292806625,
-0.03726157918572426,
0.06822623312473297,
0.04706262797117233,
0.06685903668403625,
0.01647263765335083,
0.09350644797086716,
-0.3155793845653534,
0.2774350941181183,
-0.005590592045336962,
0.0599040761590004,
-0.08444105088710785,
0.01219366304576397,
0.027998844161629677,
0.061104290187358856,
0.060259606689214706,
-0.009961032308638096,
-0.04421522468328476,
-0.16203095018863678,
-0.04873527213931084,
0.037631239742040634,
0.09466706216335297,
-0.039832618087530136,
0.09491458535194397,
-0.038146935403347015,
0.021813733503222466,
0.06609169393777847,
0.021931691095232964,
-0.044268496334552765,
-0.1044638529419899,
-0.005309918895363808,
0.041147612035274506,
-0.022434672340750694,
-0.059984657913446426,
-0.0977160856127739,
-0.09985863417387009,
0.13782931864261627,
-0.0036099173594266176,
-0.04459940269589424,
-0.1020008772611618,
0.06140226125717163,
0.06942519545555115,
-0.07917895913124084,
0.040977317839860916,
0.008520476520061493,
0.0634273961186409,
0.025958729907870293,
-0.06746578961610794,
0.11187891662120819,
-0.07526646554470062,
-0.1646653413772583,
-0.05621572956442833,
0.12438133358955383,
0.01349491998553276,
0.06416228413581848,
-0.014442242681980133,
0.013909263536334038,
-0.059106048196554184,
-0.09569985419511795,
0.013074448332190514,
-0.008459503762423992,
0.07689423114061356,
0.029724353924393654,
-0.060844048857688904,
0.021276403218507767,
-0.06583140045404434,
-0.057789985090494156,
0.20734843611717224,
0.24499325454235077,
-0.07841960340738297,
0.02675280161201954,
0.041384946554899216,
-0.07146956771612167,
-0.18340906500816345,
-0.004527483135461807,
0.050504185259342194,
-0.002598698018118739,
0.038278914988040924,
-0.1799866259098053,
0.0947662889957428,
0.11015106737613678,
-0.010233289562165737,
0.09725337475538254,
-0.3629354238510132,
-0.12169282883405685,
0.10653143376111984,
0.12918810546398163,
0.13677580654621124,
-0.14721129834651947,
-0.022909551858901978,
-0.041361771523952484,
-0.14187045395374298,
0.10766144841909409,
-0.0816182941198349,
0.13549990952014923,
-0.03305571526288986,
0.10307945311069489,
0.0007267431356012821,
-0.05329393595457077,
0.11902713030576706,
0.029017020016908646,
0.08964721113443375,
-0.06625257432460785,
0.0016297845868393779,
0.02909458614885807,
-0.04142480343580246,
0.03337300941348076,
-0.1232781633734703,
0.03627392649650574,
-0.11543336510658264,
-0.019056065008044243,
-0.0685330182313919,
0.02703753113746643,
-0.032984115183353424,
-0.064038947224617,
-0.03517011180520058,
-0.0027187431696802378,
0.07168377190828323,
-0.004299290012568235,
0.1443139761686325,
0.01583070680499077,
0.15444836020469666,
0.13270539045333862,
0.06927776336669922,
-0.09429984539747238,
-0.057487815618515015,
-0.029374241828918457,
-0.01148436963558197,
0.052447762340307236,
-0.1600394994020462,
0.027794845402240753,
0.1416092813014984,
0.01400441862642765,
0.14781561493873596,
0.09178045392036438,
-0.008000190369784832,
0.011261841282248497,
0.056632865220308304,
-0.1709195226430893,
-0.09318671375513077,
-0.017439259216189384,
-0.022436598315835,
-0.10217080265283585,
0.05469169467687607,
0.1036049872636795,
-0.07419826090335846,
-0.0009358818060718477,
-0.018124118447303772,
0.027978843078017235,
-0.056037046015262604,
0.19034473598003387,
0.02201787754893303,
0.05338439717888832,
-0.10191377997398376,
0.0910077840089798,
0.043359093368053436,
-0.07972651720046997,
0.012651168741285801,
0.08976022899150848,
-0.09239498525857925,
-0.05287184938788414,
0.045972127467393875,
0.14926831424236298,
-0.04774013161659241,
-0.0435536727309227,
-0.1358429193496704,
-0.1377008557319641,
0.09701888263225555,
0.13424043357372284,
0.10833096504211426,
0.008115516975522041,
-0.07073778659105301,
0.0068639419041574,
-0.10721863061189651,
0.08850134909152985,
0.03900611028075218,
0.05763440206646919,
-0.124304860830307,
0.12773525714874268,
0.010839175432920456,
0.03502528741955757,
-0.02114691026508808,
0.015797972679138184,
-0.08745020627975464,
0.025425957515835762,
-0.11974766105413437,
-0.031493429094552994,
-0.021107764914631844,
0.0014051991747692227,
-0.010925638489425182,
-0.04460747539997101,
-0.061678383499383926,
0.013905931264162064,
-0.11779990792274475,
-0.026097066700458527,
0.015517382882535458,
0.06984514743089676,
-0.10735932737588882,
-0.03169885650277138,
0.030655408278107643,
-0.07196055352687836,
0.07848287373781204,
0.06662141531705856,
-0.0017905387794598937,
0.06075560674071312,
-0.13104915618896484,
0.00788868498057127,
0.07770723849534988,
0.029709240421652794,
0.04535835608839989,
-0.0830349400639534,
-0.010848769918084145,
0.009980813600122929,
0.04607982933521271,
0.020393040031194687,
0.06160623952746391,
-0.14229366183280945,
-0.0006931423558853567,
-0.008761200122535229,
-0.08805520087480545,
-0.05843943729996681,
0.009839327074587345,
0.07247715443372726,
0.017838288098573685,
0.1993095576763153,
-0.07072267681360245,
0.04041281342506409,
-0.20891542732715607,
0.009672140702605247,
0.003867476712912321,
-0.12658990919589996,
-0.12485688924789429,
-0.08377106487751007,
0.05731770396232605,
-0.057351309806108475,
0.15782688558101654,
0.027261674404144287,
0.0333009772002697,
0.02963750623166561,
-0.0006635090103372931,
0.009887591935694218,
0.006628153380006552,
0.21825525164604187,
0.046891480684280396,
-0.03750454634428024,
0.054259367287158966,
0.0387999564409256,
0.11251255124807358,
0.10829321295022964,
0.2009643018245697,
0.15109862387180328,
0.01172992866486311,
0.1008065789937973,
0.027078788727521896,
-0.04588756337761879,
-0.16296443343162537,
0.019428420811891556,
-0.009060898795723915,
0.12377867102622986,
-0.02409924939274788,
0.21730127930641174,
0.07059691846370697,
-0.15806476771831512,
0.042402829974889755,
-0.06512074172496796,
-0.06992527842521667,
-0.10970388352870941,
-0.03383413329720497,
-0.0759795680642128,
-0.1528080701828003,
-0.0046133240684866905,
-0.12294533103704453,
0.026195093989372253,
0.13120399415493011,
0.013043525628745556,
-0.026641150936484337,
0.13806629180908203,
0.010297344997525215,
0.0032638669945299625,
0.061542581766843796,
-0.01251494511961937,
-0.03027746081352234,
-0.11859628558158875,
-0.0750010684132576,
-0.005291099194437265,
-0.012534537352621555,
0.040697209537029266,
-0.0395318828523159,
-0.0630105659365654,
0.021736999973654747,
-0.035695016384124756,
-0.08874943107366562,
0.0076824333518743515,
0.01932012103497982,
0.054323192685842514,
0.044085968285799026,
-0.00010223114077234641,
0.008015112951397896,
0.003336902940645814,
0.2326664924621582,
-0.08718099445104599,
-0.09095340222120285,
-0.08618287742137909,
0.24957188963890076,
0.047242309898138046,
-0.0061520542949438095,
0.02908812277019024,
-0.05531904473900795,
-0.020394405350089073,
0.2490682154893875,
0.20738689601421356,
-0.0899912640452385,
-0.01859390176832676,
0.00815556664019823,
-0.0023213978856801987,
-0.007580799050629139,
0.1057734340429306,
0.15999716520309448,
0.04997071623802185,
-0.08658535033464432,
-0.003282845951616764,
-0.05311394855380058,
-0.007320925127714872,
-0.04227927699685097,
0.07589226961135864,
0.0351007804274559,
-0.0012123482301831245,
-0.03251378610730171,
0.05744772404432297,
-0.07875775545835495,
-0.07695365697145462,
0.007998953573405743,
-0.20932020246982574,
-0.15140913426876068,
-0.004963694605976343,
0.1132420226931572,
0.013654151931405067,
0.06676063686609268,
-0.026138069108128548,
0.005211587995290756,
0.0714183896780014,
-0.0204933974891901,
-0.08282114565372467,
-0.08970426023006439,
0.09918823838233948,
-0.12061179429292679,
0.19403459131717682,
-0.04184124991297722,
0.05653001740574837,
0.11919823288917542,
0.06030278652906418,
-0.06628725677728653,
0.0788046196103096,
0.047447673976421356,
-0.03658370301127434,
0.02228563465178013,
0.07596838474273682,
-0.03287418186664581,
0.08100872486829758,
0.04873047396540642,
-0.13680271804332733,
0.012750135734677315,
-0.024337273091077805,
-0.050838060677051544,
-0.025174351409077644,
-0.05015527456998825,
-0.05384037271142006,
0.12259245663881302,
0.2124648243188858,
-0.03904446214437485,
-0.0009989301906898618,
-0.07574564963579178,
0.014373376034200191,
0.05090692266821861,
0.012348094955086708,
-0.0591336227953434,
-0.23803238570690155,
-0.00850228127092123,
0.08559342473745346,
-0.01515812799334526,
-0.25454384088516235,
-0.09021934866905212,
-0.005085038021206856,
-0.05666882544755936,
-0.10967348515987396,
0.07806266099214554,
0.08677038550376892,
0.03274858742952347,
-0.057636622339487076,
-0.09097708761692047,
-0.07293550670146942,
0.16326239705085754,
-0.1335078328847885,
-0.09058472514152527
] |
null | null |
transformers
|
{"language": "fr", "license": "mit"}
|
text2text-generation
|
airKlizz/t5-base-with-title-multi-fr-wiki-news
|
[
"transformers",
"pytorch",
"jax",
"t5",
"text2text-generation",
"fr",
"license:mit",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"fr"
] |
TAGS
#transformers #pytorch #jax #t5 #text2text-generation #fr #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
[] |
[
"TAGS\n#transformers #pytorch #jax #t5 #text2text-generation #fr #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
58
] |
[
"passage: TAGS\n#transformers #pytorch #jax #t5 #text2text-generation #fr #license-mit #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
-0.004016456194221973,
0.01848491095006466,
-0.006496403366327286,
0.043066032230854034,
0.14289376139640808,
0.01765657216310501,
0.15650422871112823,
0.13302861154079437,
0.015481752343475819,
-0.05525532737374306,
0.14402993023395538,
0.22595512866973877,
0.0012570908293128014,
0.06285139173269272,
-0.09647909551858902,
-0.25212588906288147,
0.03330136835575104,
0.05885815992951393,
0.017320793122053146,
0.11719357222318649,
0.10762462019920349,
-0.0466182604432106,
0.10093005746603012,
-0.013367878273129463,
-0.17000748217105865,
0.02716231346130371,
0.07043696939945221,
-0.11721063405275345,
0.11741169542074203,
0.06557036191225052,
0.06717056035995483,
0.0734763890504837,
-0.02192041091620922,
-0.13789185881614685,
0.0296246949583292,
0.0013138663489371538,
-0.09609337896108627,
0.06808533519506454,
0.10469082742929459,
-0.053556159138679504,
0.15384608507156372,
0.0748506486415863,
-0.01978234387934208,
0.07592438906431198,
-0.1439860314130783,
-0.07176815718412399,
-0.05353574454784393,
0.07299083471298218,
0.0583641454577446,
0.08785653114318848,
0.01615208014845848,
0.12172240763902664,
-0.0762084573507309,
0.11061026155948639,
0.14716178178787231,
-0.3521377444267273,
0.00512642040848732,
0.07286885380744934,
0.08049244433641434,
0.046970147639513016,
-0.03246666491031647,
0.04806888476014137,
0.042418695986270905,
0.021415483206510544,
0.05220276489853859,
-0.08943850547075272,
-0.12271252274513245,
0.04134410247206688,
-0.07577890157699585,
-0.06397784501314163,
0.25439393520355225,
-0.046706750988960266,
0.03105187602341175,
-0.012951193377375603,
-0.08133857697248459,
-0.04809693247079849,
-0.004845818504691124,
-0.003414181526750326,
-0.02670820616185665,
0.0780935138463974,
0.048337843269109726,
-0.06654974818229675,
-0.14413078129291534,
-0.01874541863799095,
-0.17791025340557098,
0.09533081948757172,
0.014165620319545269,
0.056712668389081955,
-0.20672063529491425,
0.08528638631105423,
0.01907948963344097,
-0.10123489052057266,
0.017441147938370705,
-0.07616275548934937,
0.07440999150276184,
-0.018781039863824844,
-0.05488724261522293,
-0.08488129824399948,
0.07280467450618744,
0.1407790631055832,
-0.024974636733531952,
0.0013184199342504144,
-0.060695830732584,
0.0995965451002121,
0.010022246278822422,
0.025990458205342293,
0.03732742741703987,
0.0006768034072592854,
0.0723133385181427,
-0.11998762935400009,
0.0073865605518221855,
-0.052829448133707047,
-0.17747342586517334,
-0.04762626811861992,
0.036512941122055054,
0.08546877652406693,
0.01733415201306343,
0.08896463364362717,
-0.031181756407022476,
-0.016038671135902405,
0.06470827758312225,
-0.07054024189710617,
-0.012139634229242802,
0.004252406302839518,
0.026837212964892387,
0.0982564315199852,
0.0360725000500679,
0.009520819410681725,
-0.12426042556762695,
0.06327875703573227,
-0.07216978073120117,
-0.019548075273633003,
-0.0239065270870924,
-0.06842836737632751,
0.05138428136706352,
-0.10180012136697769,
0.02603205293416977,
-0.15444879233837128,
-0.1755804866552353,
0.02682499587535858,
0.03159323334693909,
-0.021923372521996498,
-0.08077522367238998,
-0.00899975374341011,
-0.053673598915338516,
0.04842296242713928,
-0.06865956634283066,
-0.006483031436800957,
-0.05268034338951111,
0.1087631806731224,
-0.05257030948996544,
0.03555774316191673,
-0.17249232530593872,
0.07364986836910248,
-0.12286253273487091,
-0.024992341175675392,
-0.028097670525312424,
0.018634449690580368,
0.0008847470744512975,
0.11198468506336212,
-0.031220607459545135,
-0.039854634553194046,
-0.03685854747891426,
0.03732467442750931,
-0.044128648936748505,
0.1770145744085312,
-0.09952939301729202,
-0.09015235304832458,
0.20575858652591705,
-0.10034045577049255,
-0.20087894797325134,
0.08590174466371536,
0.00280096591450274,
0.07306366413831711,
0.08003360778093338,
0.18057824671268463,
0.05035652592778206,
-0.06966698914766312,
0.09886591881513596,
0.11536622792482376,
-0.08991175144910812,
-0.13924425840377808,
0.041402846574783325,
-0.025919198989868164,
-0.10930965095758438,
0.040070295333862305,
0.035953979939222336,
0.0648391917347908,
-0.03860592097043991,
-0.04320826008915901,
-0.04679524898529053,
0.00262060621753335,
0.02709715999662876,
0.002133514964953065,
0.10875143110752106,
-0.060668908059597015,
-0.00973584409803152,
0.024380210787057877,
-0.02057277411222458,
0.005354856140911579,
0.043349165469408035,
-0.036708325147628784,
0.11938253045082092,
0.00540830846875906,
0.04305141046643257,
-0.15742048621177673,
-0.030189137905836105,
-0.006177584175020456,
0.10866618156433105,
0.032437290996313095,
0.10317419469356537,
0.03564707562327385,
-0.01593809574842453,
-0.01058230735361576,
0.004524210933595896,
0.11880636215209961,
0.0038143449928611517,
-0.05399732664227486,
-0.1168166846036911,
0.03660173714160919,
-0.05285004898905754,
-0.02331257052719593,
-0.09004463255405426,
0.027644073590636253,
0.03873373940587044,
0.08924825489521027,
-0.0012544820783659816,
0.06703867763280869,
-0.04017803445458412,
0.0024946376215666533,
-0.09431828558444977,
0.020738862454891205,
0.11324137449264526,
0.018850792199373245,
-0.0503946989774704,
0.23264804482460022,
-0.14428304135799408,
0.24863742291927338,
0.20141099393367767,
-0.23433451354503632,
0.004599994048476219,
-0.0655447468161583,
-0.033385831862688065,
0.00557195208966732,
0.054317720234394073,
-0.01128044817596674,
0.04975944012403488,
-0.0089059267193079,
0.19650641083717346,
-0.08102947473526001,
-0.053237829357385635,
0.013089857064187527,
-0.05663515627384186,
-0.019810808822512627,
0.06309131532907486,
0.14663498103618622,
-0.2180546224117279,
0.17684409022331238,
0.25082314014434814,
0.04008981212973595,
0.17608340084552765,
-0.036410070955753326,
-0.021555950865149498,
0.04110586270689964,
-0.003316620597615838,
-0.018324248492717743,
-0.04683581367135048,
-0.14490865170955658,
-0.004040002357214689,
0.07829710841178894,
0.017909247428178787,
0.07079610228538513,
-0.12267928570508957,
-0.052852749824523926,
-0.007946821860969067,
-0.020742395892739296,
-0.045609474182128906,
0.1056114137172699,
0.036515288054943085,
0.12981583178043365,
-0.03850352764129639,
-0.042019739747047424,
0.12640638649463654,
0.015130539424717426,
-0.12324267625808716,
0.17975389957427979,
-0.12527048587799072,
-0.25368794798851013,
-0.15752369165420532,
-0.1602916717529297,
-0.03321881964802742,
0.02712438814342022,
0.13751555979251862,
-0.04378459230065346,
-0.0235152468085289,
-0.02300889790058136,
0.018398523330688477,
-0.09171634167432785,
0.014687743037939072,
-0.0936245322227478,
0.06018713489174843,
-0.06588701903820038,
-0.09546037018299103,
-0.06268279254436493,
-0.0038219657726585865,
-0.05104752629995346,
0.12662367522716522,
-0.1055075004696846,
0.06206906959414482,
0.14726245403289795,
-0.011935035698115826,
0.046627890318632126,
-0.0578303188085556,
0.19057592749595642,
-0.037105776369571686,
0.020675135776400566,
0.22987762093544006,
-0.028965746983885765,
0.07235225290060043,
0.1325131058692932,
0.01976962387561798,
-0.0635870173573494,
0.019375750795006752,
-0.04360884055495262,
-0.07995887100696564,
-0.2697877585887909,
-0.07710245251655579,
-0.1382880061864853,
0.08767794817686081,
0.06776261329650879,
0.06957830488681793,
0.17723120748996735,
0.05788516625761986,
-0.022227687761187553,
0.04821963235735893,
0.04288947582244873,
0.09961522370576859,
0.23699308931827545,
-0.0017775758169591427,
0.11518573015928268,
-0.07895579934120178,
-0.07691233605146408,
0.09765956550836563,
0.05740625783801079,
0.12530328333377838,
0.08213560283184052,
0.08882889896631241,
0.05201460421085358,
0.11453372240066528,
0.11871248483657837,
0.13785125315189362,
0.023712070658802986,
-0.003911609295755625,
-0.04437151178717613,
-0.049671564251184464,
-0.012899227440357208,
0.043596696108579636,
-0.022571559995412827,
-0.125803604722023,
-0.08551450818777084,
-0.0881505161523819,
0.0345795676112175,
0.12702946364879608,
0.04371129348874092,
-0.21052899956703186,
0.015057915821671486,
0.06308748573064804,
-0.01840454526245594,
-0.09195508807897568,
0.0820927619934082,
-0.01055455207824707,
-0.11995849013328552,
0.08064018189907074,
-0.054073963314294815,
0.1262417733669281,
0.020620403811335564,
0.08132687956094742,
-0.008267893455922604,
-0.08287329226732254,
0.029417507350444794,
0.11116122454404831,
-0.3539831042289734,
0.2113921195268631,
-0.001173506141640246,
-0.0377812460064888,
-0.0858275517821312,
0.002515423111617565,
-0.003920155111700296,
0.15948113799095154,
0.12376372516155243,
-0.0012128881644457579,
-0.05570856109261513,
-0.039714518934488297,
0.02057010307908058,
0.024193625897169113,
0.10065846890211105,
-0.026569755747914314,
-0.004770717117935419,
-0.07194137573242188,
-0.0043803006410598755,
-0.005122404079884291,
0.024264831095933914,
-0.04143843427300453,
-0.1652309149503708,
0.06634360551834106,
0.023028340190649033,
0.0437203012406826,
-0.006099828518927097,
-0.02318958379328251,
-0.08130022883415222,
0.17775899171829224,
-0.09012778848409653,
-0.08684566617012024,
-0.12864737212657928,
-0.08896547555923462,
0.021954379975795746,
-0.07431259006261826,
0.042615458369255066,
-0.0882137268781662,
0.004029980394989252,
-0.07511013001203537,
-0.22892630100250244,
0.12348388880491257,
-0.09429077059030533,
-0.060157302767038345,
-0.04634649306535721,
0.16826699674129486,
-0.09527941048145294,
0.011824395507574081,
0.032934222370386124,
0.005592751782387495,
-0.09128434956073761,
-0.0870399922132492,
-0.001810642541386187,
-0.04326526075601578,
0.027580296620726585,
-0.03688588738441467,
-0.10932820290327072,
-0.047426968812942505,
-0.015019436366856098,
-0.02810702472925186,
0.2723504304885864,
0.18588188290596008,
-0.05186399444937706,
0.19856145977973938,
0.13086560368537903,
-0.1121174618601799,
-0.28927069902420044,
-0.11225418746471405,
-0.1120804026722908,
-0.046039603650569916,
0.0061188661493361,
-0.1631075143814087,
0.04896310716867447,
0.0048615154810249805,
-0.025039754807949066,
0.11954974383115768,
-0.24660983681678772,
-0.09527094662189484,
0.12730011343955994,
0.03196980059146881,
0.3175541162490845,
-0.1337650716304779,
-0.10447840392589569,
-0.03748976066708565,
-0.19978515803813934,
0.2170543372631073,
-0.07687535881996155,
0.08719722181558609,
-0.043200016021728516,
0.05819466710090637,
0.022722367197275162,
-0.03754601627588272,
0.05861308425664902,
-0.003187060821801424,
0.040474262088537216,
-0.11354046314954758,
-0.042397838085889816,
0.09465882182121277,
0.001141843618825078,
0.047938600182533264,
-0.1326921433210373,
0.03673499450087547,
-0.11325152963399887,
-0.03277375549077988,
-0.09165152162313461,
0.08015389740467072,
-0.01082849595695734,
-0.07603152096271515,
0.0035489711444824934,
-0.05296473577618599,
0.014913580380380154,
-0.02275775372982025,
0.23988746106624603,
-0.028599636629223824,
0.16330936551094055,
0.19271963834762573,
0.10948019474744797,
-0.10394907742738724,
0.032538022845983505,
-0.08620081841945648,
-0.0757952481508255,
0.06370038539171219,
-0.1323770433664322,
0.02940995991230011,
0.1145525872707367,
-0.022551370784640312,
0.08332575112581253,
0.10373304039239883,
0.0008104142034426332,
-0.021481303498148918,
0.12781065702438354,
-0.22560128569602966,
-0.03550715744495392,
-0.07429368793964386,
-0.019009331241250038,
0.06446930021047592,
0.05750887095928192,
0.1601928323507309,
-0.020437706261873245,
-0.030251238495111465,
0.0013367274077609181,
0.009499046951532364,
-0.06655818223953247,
0.03358164057135582,
0.02247074991464615,
0.014598686248064041,
-0.1256992071866989,
0.08740832656621933,
0.037955667823553085,
-0.12243245542049408,
0.0021749348379671574,
0.18430198729038239,
-0.1309635043144226,
-0.12637417018413544,
-0.0020758213941007853,
0.1015893742442131,
-0.17986364662647247,
-0.031074175611138344,
-0.057615164667367935,
-0.14975902438163757,
0.08968901634216309,
0.18903127312660217,
0.052152909338474274,
0.08305366337299347,
-0.03703875094652176,
-0.059332165867090225,
-0.032481640577316284,
0.02139253169298172,
-0.04695713147521019,
0.026414623484015465,
-0.0917094275355339,
0.04907849058508873,
-0.020008422434329987,
0.13178592920303345,
-0.07063303142786026,
-0.037509169429540634,
-0.14239832758903503,
0.011869030073285103,
-0.11658631265163422,
-0.03776087239384651,
-0.0722569152712822,
-0.04043419659137726,
-0.00606326200067997,
-0.020060354843735695,
-0.04853002727031708,
-0.018107477575540543,
-0.11937674880027771,
0.0034756893292069435,
-0.018792329356074333,
0.07463563978672028,
-0.08853619545698166,
-0.01390845887362957,
0.06009776517748833,
-0.027346761897206306,
0.11565010994672775,
0.09684047847986221,
-0.09959342330694199,
0.11444094777107239,
-0.17781607806682587,
-0.07026682794094086,
0.09093695133924484,
0.028722934424877167,
0.03837895393371582,
0.06271238625049591,
0.016694998368620872,
0.08814739435911179,
-0.0030009665060788393,
0.038821544498205185,
-0.008953706361353397,
-0.13094909489154816,
-0.006292565260082483,
-0.027704859152436256,
-0.13507351279258728,
-0.05352415516972542,
-0.026142479851841927,
0.047687262296676636,
0.007890996523201466,
0.15599611401557922,
-0.05170551314949989,
0.08697550743818283,
-0.08733776211738586,
0.02129594422876835,
0.00774417445063591,
-0.15623678267002106,
-0.14296671748161316,
-0.09668809175491333,
-0.005640968214720488,
-0.008345563896000385,
0.20040279626846313,
0.039612915366888046,
-0.038347095251083374,
0.048448722809553146,
0.06458577513694763,
-0.002651832764968276,
0.00043773651123046875,
0.260414183139801,
0.053931351751089096,
-0.043430883437395096,
-0.11133064329624176,
0.053993724286556244,
-0.010879860259592533,
-0.00840104091912508,
0.16147811710834503,
0.07374140620231628,
-0.01483012456446886,
0.08066444844007492,
0.049650657922029495,
0.01867341436445713,
-0.06288443505764008,
-0.12389591336250305,
0.05555681884288788,
0.08927066624164581,
-0.022922424599528313,
0.10120990127325058,
0.1877104789018631,
-0.03417515382170677,
0.025798456743359566,
-0.021092353388667107,
-0.03730280324816704,
-0.1794828623533249,
-0.1660255491733551,
-0.06830311566591263,
-0.0974152609705925,
0.006670941598713398,
-0.0824233889579773,
0.07414107769727707,
0.010873440653085709,
0.0648740753531456,
-0.08136404305696487,
0.015032951720058918,
0.04794551059603691,
-0.10697080194950104,
0.059127796441316605,
-0.03422648832201958,
0.0403822585940361,
-0.04853198677301407,
0.00203158101066947,
-0.0730384886264801,
-0.03924550861120224,
-0.03823718801140785,
0.0570005364716053,
-0.010222075507044792,
0.02953232452273369,
-0.1399974673986435,
-0.09889072924852371,
-0.013613350689411163,
0.061622992157936096,
-0.015586993657052517,
0.15851926803588867,
0.011691692285239697,
-0.01634599082171917,
0.04867596551775932,
0.19400063157081604,
-0.05815696343779564,
-0.09858264029026031,
-0.009543819352984428,
0.24200573563575745,
0.07263723760843277,
0.07542851567268372,
0.01155335083603859,
0.012282408773899078,
-0.054497625678777695,
0.30681541562080383,
0.3142048120498657,
-0.051544927060604095,
0.026938999071717262,
0.01026423741132021,
0.031428541988134384,
0.11206534504890442,
0.14530286192893982,
0.0709543228149414,
0.24443931877613068,
-0.06003979593515396,
0.02169831655919552,
-0.037961993366479874,
0.0187104269862175,
-0.09156404435634613,
0.11960096657276154,
0.03468496724963188,
-0.09585459530353546,
0.007701038382947445,
0.08637642860412598,
-0.20803143084049225,
0.10253550857305527,
-0.05886629968881607,
-0.12071666121482849,
-0.03264179453253746,
-0.020288540050387383,
0.11481420695781708,
0.015069718472659588,
0.05959608405828476,
-0.029405340552330017,
-0.06286758929491043,
0.0724678635597229,
0.013606696389615536,
-0.23186242580413818,
0.014302882365882397,
0.07346966862678528,
-0.09509607404470444,
0.025481784716248512,
-0.008986293338239193,
0.06582962721586227,
0.06998452544212341,
0.0662793517112732,
-0.0679837167263031,
0.05742710456252098,
0.0109796691685915,
-0.010514259338378906,
0.023183219134807587,
-0.01509741973131895,
0.021053040400147438,
-0.0710611641407013,
0.04118622466921806,
-0.10961642116308212,
0.045880209654569626,
-0.028908872976899147,
-0.06341168284416199,
-0.026581885293126106,
0.022474320605397224,
-0.05080234631896019,
0.05949041247367859,
0.07827071100473404,
-0.011726764030754566,
-0.023670565336942673,
-0.089909128844738,
-0.023044170811772346,
0.04824388027191162,
-0.13124237954616547,
-0.06144273653626442,
-0.06770457327365875,
-0.07079706341028214,
0.0988001823425293,
0.01127884816378355,
-0.22350536286830902,
0.007046347483992577,
-0.11578018218278885,
0.030934680253267288,
-0.20877468585968018,
0.07696854323148727,
0.12089034169912338,
0.012117188423871994,
0.005815904587507248,
-0.05057491734623909,
0.0340706929564476,
0.07755234837532043,
-0.10429210215806961,
-0.07207293808460236
] |
||
null | null |
transformers
|
# bert-base-multilingual-cased
Finetuning `bert-base-multilingual-cased` with the training set of `iapp_wiki_qa_squad`, `thaiqa_squad`, and `nsc_qa` (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 `newmm` words). Benchmarks shared on [wandb](https://wandb.ai/cstorm125/wangchanberta-qa) using validation and test sets of `iapp_wiki_qa_squad`.
Trained with [thai2transformers](https://github.com/vistec-AI/thai2transformers/blob/dev/scripts/downstream/train_question_answering_lm_finetuning.py).
Run with:
```
export MODEL_NAME=bert-base-multilingual-cased
python train_question_answering_lm_finetuning.py \
--model_name $MODEL_NAME \
--dataset_name chimera_qa \
--output_dir $MODEL_NAME-finetune-chimera_qa-model \
--log_dir $MODEL_NAME-finetune-chimera_qa-log \
--pad_on_right \
--fp16
```
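For inference with the resulting checkpoint, a minimal sketch (not part of the original card; it assumes the standard `transformers` question-answering pipeline and this repository's Hub id) could look like:
```python
from transformers import pipeline

# Hypothetical usage sketch: load the finetuned checkpoint into a QA pipeline.
qa = pipeline(
    "question-answering",
    model="airesearch/bert-base-multilingual-cased-finetune-qa",
)

# Thai question/context pair, shortened from the widget example above.
result = qa(
    question="สวนกุหลาบเป็นโรงเรียนอะไร",
    context="โรงเรียนสวนกุหลาบวิทยาลัย (Suankularb Wittayalai School) เป็นโรงเรียนชายล้วนระดับชั้นมัธยมศึกษา",
)
print(result)  # dict with 'answer', 'score', 'start', 'end'
```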
|
{"widget": [{"text": "\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2d\u0e30\u0e44\u0e23", "context": "\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e27\u0e34\u0e17\u0e22\u0e32\u0e25\u0e31\u0e22 (Suankularb Wittayalai School) (\u0e2d\u0e31\u0e01\u0e29\u0e23\u0e22\u0e48\u0e2d : \u0e2a.\u0e01. / S.K.) \u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e0a\u0e32\u0e22\u0e25\u0e49\u0e27\u0e19 \u0e23\u0e30\u0e14\u0e31\u0e1a\u0e0a\u0e31\u0e49\u0e19\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e19\u0e32\u0e14\u0e43\u0e2b\u0e0d\u0e48\u0e1e\u0e34\u0e40\u0e28\u0e29 \u0e2a\u0e31\u0e07\u0e01\u0e31\u0e14\u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e40\u0e02\u0e15\u0e1e\u0e37\u0e49\u0e19\u0e17\u0e35\u0e48\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e40\u0e02\u0e15 1 \u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e04\u0e13\u0e30\u0e01\u0e23\u0e23\u0e21\u0e01\u0e32\u0e23\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e31\u0e49\u0e19\u0e1e\u0e37\u0e49\u0e19\u0e10\u0e32\u0e19 (\u0e0a\u0e37\u0e48\u0e2d\u0e40\u0e14\u0e34\u0e21: \u0e01\u0e23\u0e21\u0e2a\u0e32\u0e21\u0e31\u0e0d\u0e28\u0e36\u0e01\u0e29\u0e32) \u0e01\u0e23\u0e30\u0e17\u0e23\u0e27\u0e07\u0e28\u0e36\u0e01\u0e29\u0e32\u0e18\u0e34\u0e01\u0e32\u0e23 \u0e01\u0e48\u0e2d\u0e15\u0e31\u0e49\u0e07\u0e42\u0e14\u0e22 \u0e1e\u0e23\u0e30\u0e1a\u0e32\u0e17\u0e2a\u0e21\u0e40\u0e14\u0e47\u0e08\u0e1e\u0e23\u0e30\u0e08\u0e38\u0e25\u0e08\u0e2d\u0e21\u0e40\u0e01\u0e25\u0e49\u0e32\u0e40\u0e08\u0e49\u0e32\u0e2d\u0e22\u0e39\u0e48\u0e2b\u0e31\u0e27 \u0e44\u0e14\u0e49\u0e23\u0e31\u0e1a\u0e01\u0e32\u0e23\u0e2a\u0e16\u0e32\u0e1b\u0e19\u0e32\u0e02\u0e36\u0e49\u0e19\u0e43\u0e19\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 8 \u0e21\u0e35\u0e19\u0e32\u0e04\u0e21 \u0e1e.\u0e28. 2424 (\u0e02\u0e13\u0e30\u0e19\u0e31\u0e49\u0e19\u0e19\u0e31\u0e1a\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 1 \u0e40\u0e21\u0e29\u0e32\u0e22\u0e19 \u0e40\u0e1b\u0e47\u0e19\u0e27\u0e31\u0e19\u0e02\u0e36\u0e49\u0e19\u0e1b\u0e35\u0e43\u0e2b\u0e21\u0e48 \u0e40\u0e21\u0e37\u0e48\u0e2d\u0e19\u0e31\u0e1a\u0e2d\u0e22\u0e48\u0e32\u0e07\u0e2a\u0e32\u0e01\u0e25\u0e16\u0e37\u0e2d\u0e40\u0e1b\u0e47\u0e19 \u0e1e.\u0e28. 2425) \u0e42\u0e14\u0e22\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e23\u0e31\u0e10\u0e1a\u0e32\u0e25\u0e41\u0e2b\u0e48\u0e07\u0e41\u0e23\u0e01\u0e02\u0e2d\u0e07\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28\u0e44\u0e17\u0e22"}]}
|
question-answering
|
airesearch/bert-base-multilingual-cased-finetune-qa
|
[
"transformers",
"pytorch",
"bert",
"question-answering",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #bert #question-answering #endpoints_compatible #region-us
|
# bert-base-multilingual-cased
Finetuning 'bert-base-multilingual-cased' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.
Trained with thai2transformers.
Run with:
|
[
"# bert-base-multilingual-cased\n\nFinetuning 'bert-base-multilingual-cased' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
"TAGS\n#transformers #pytorch #bert #question-answering #endpoints_compatible #region-us \n",
"# bert-base-multilingual-cased\n\nFinetuning 'bert-base-multilingual-cased' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
29,
139
] |
[
"passage: TAGS\n#transformers #pytorch #bert #question-answering #endpoints_compatible #region-us \n# bert-base-multilingual-cased\n\nFinetuning 'bert-base-multilingual-cased' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
-0.05037573724985123,
-0.13167336583137512,
-0.003586225677281618,
0.0750507116317749,
0.036429379135370255,
-0.003792376723140478,
0.10118138790130615,
0.11611802130937576,
0.004925064742565155,
0.0024002883583307266,
0.06598848849534988,
-0.012101930566132069,
0.061700597405433655,
-0.007078728172928095,
-0.06681698560714722,
-0.2002054750919342,
0.08024726808071136,
0.1239180788397789,
-0.04260154068470001,
0.1238020658493042,
0.1074824407696724,
-0.05394481122493744,
0.05844228342175484,
0.046503253281116486,
-0.10256262868642807,
0.0329880565404892,
-0.000719836272764951,
-0.07705377787351608,
0.10391578078269958,
0.03226371854543686,
0.19762510061264038,
0.050827618688344955,
-0.06300840526819229,
-0.2244204431772232,
0.044404082000255585,
-0.023002220317721367,
0.05345435068011284,
-0.027961209416389465,
0.009669024497270584,
0.05009849742054939,
-0.12112533301115036,
0.04347354173660278,
0.03236083313822746,
0.01016979943960905,
-0.07521016895771027,
-0.023425869643688202,
-0.014549538493156433,
0.01643906906247139,
0.12721289694309235,
0.03937428072094917,
-0.03386780247092247,
0.1718757599592209,
-0.20091111958026886,
0.07848577946424484,
0.07077151536941528,
-0.3998008072376251,
-0.027416778728365898,
0.055362168699502945,
0.08728108555078506,
0.013891876675188541,
-0.05961678922176361,
-0.009565821848809719,
0.06070972606539726,
0.05957118794322014,
-0.09998991340398788,
-0.11395284533500671,
-0.11909083276987076,
-0.020617948845028877,
-0.07610116899013519,
0.02126663736999035,
0.1704859733581543,
0.04415920004248619,
-0.1003623977303505,
-0.04618680104613304,
-0.0226752832531929,
0.011643165722489357,
-0.02270110882818699,
-0.06416311860084534,
-0.04966789111495018,
0.021285297349095345,
-0.04256373643875122,
0.000979781849309802,
-0.10699550807476044,
-0.08124914765357971,
-0.13382373750209808,
0.06900853663682938,
0.09120779484510422,
0.020034458488225937,
-0.13698767125606537,
0.018321074545383453,
-0.04522918909788132,
-0.09859183430671692,
-0.09085150808095932,
-0.0363009013235569,
-0.03385034576058388,
0.003525505540892482,
-0.08256559073925018,
-0.03306782618165016,
0.1439746767282486,
0.09763316065073013,
-0.06853949278593063,
0.0956074595451355,
-0.17583763599395752,
0.06351587176322937,
-0.04022538289427757,
0.20049145817756653,
-0.010833733715116978,
0.10645389556884766,
0.008897301740944386,
-0.023065859451889992,
-0.07205602526664734,
0.02903950959444046,
-0.008115035481750965,
-0.06843436509370804,
0.09224143624305725,
0.053353067487478256,
-0.11203189194202423,
0.10973019152879715,
-0.043625835329294205,
-0.009920766577124596,
0.06201248988509178,
-0.11316580325365067,
-0.04717466980218887,
0.02537732571363449,
0.007058736402541399,
0.0637633427977562,
-0.003447669092565775,
0.07021448761224747,
-0.08204460889101028,
0.020116008818149567,
0.03658100217580795,
0.027207447215914726,
0.019029822200536728,
-0.08048044890165329,
0.022706348448991776,
-0.06524642556905746,
-0.011179142631590366,
-0.1338682919740677,
-0.07039027661085129,
0.031776413321495056,
0.015341412276029587,
0.019151432439684868,
0.0466839000582695,
-0.06963367760181427,
0.019278299063444138,
-0.06136637553572655,
-0.018494969233870506,
-0.02032341994345188,
-0.03995135426521301,
0.07759054005146027,
0.055874645709991455,
0.1230841800570488,
-0.14929479360580444,
0.03413935378193855,
-0.1385192722082138,
0.06660091131925583,
-0.09811440110206604,
0.0955568253993988,
-0.06523144245147705,
-0.0657738596200943,
-0.08049789071083069,
-0.030694495886564255,
-0.04527772217988968,
0.044585537165403366,
0.018732856959104538,
0.11079234629869461,
-0.1308692842721939,
-0.037292975932359695,
0.25789007544517517,
-0.08836602419614792,
-0.15594623982906342,
0.2211908996105194,
-0.041322071105241776,
0.04567546769976616,
0.05902434512972832,
0.23186475038528442,
0.041396256536245346,
-0.12027619779109955,
0.07943335920572281,
0.016507595777511597,
0.028661487624049187,
0.05120803415775299,
0.04630304500460625,
-0.008720200508832932,
-0.0626409575343132,
0.08123788237571716,
-0.07712419331073761,
0.06599807739257812,
-0.051940858364105225,
-0.05414024740457535,
-0.052081141620874405,
-0.09412384033203125,
0.07584022730588913,
-0.03637050837278366,
0.13691839575767517,
-0.07051480561494827,
-0.0013318578712642193,
0.028098084032535553,
0.048234086483716965,
-0.06022839620709419,
0.040943898260593414,
-0.14558641612529755,
0.11366643756628036,
-0.11022701114416122,
-0.02066829800605774,
-0.12099308520555496,
-0.028645453974604607,
0.06666440516710281,
0.09408716857433319,
0.058454789221286774,
0.0922151580452919,
0.011995627544820309,
-0.016774479299783707,
-0.0694996789097786,
0.013939405791461468,
0.0724467933177948,
-0.010234484449028969,
-0.04252621904015541,
0.00007005521183600649,
0.048926759511232376,
-0.04625575244426727,
0.02804357185959816,
-0.06121159717440605,
0.016206828877329826,
0.024319564923644066,
0.11023594439029694,
-0.0024379089009016752,
0.061302341520786285,
0.05021112039685249,
0.10289932787418365,
0.00921647809445858,
0.023051811382174492,
0.07655071467161179,
0.006367839407175779,
-0.13586774468421936,
0.12165157496929169,
0.00345978862605989,
0.11310858279466629,
0.15342438220977783,
-0.11589246243238449,
0.005145267583429813,
-0.0343044213950634,
-0.01748947612941265,
-0.019075313583016396,
-0.07969807088375092,
0.0686161145567894,
0.22372055053710938,
0.024773597717285156,
0.17289790511131287,
-0.11198346316814423,
0.017506537958979607,
-0.0028676935471594334,
-0.05822506546974182,
-0.0037246225401759148,
0.058264754712581635,
0.010792091488838196,
-0.21574755012989044,
0.08032090216875076,
0.1549215167760849,
-0.058272622525691986,
0.17593656480312347,
-0.08916492015123367,
-0.03101867251098156,
0.02556871622800827,
0.17613963782787323,
-0.03787930682301521,
0.024795973673462868,
-0.15426601469516754,
-0.0003492153773549944,
0.04428365081548691,
0.07240583002567291,
0.01899510808289051,
-0.13395246863365173,
-0.01511264406144619,
-0.015797333791851997,
-0.09317909181118011,
-0.11116094142198563,
0.08223235607147217,
0.06116079166531563,
0.0597844272851944,
0.009025502018630505,
-0.006547327619045973,
0.050328925251960754,
0.008720848709344864,
-0.11180055141448975,
0.18378974497318268,
-0.0655929446220398,
-0.259940505027771,
-0.06998153775930405,
-0.01231854222714901,
-0.12416993826627731,
-0.0296468622982502,
0.06173368915915489,
-0.1505131870508194,
0.03431570529937744,
0.0023977039381861687,
0.013197098858654499,
-0.016900228336453438,
0.016689220443367958,
-0.033126942813396454,
-0.015344074927270412,
0.03505316004157066,
-0.10313896834850311,
-0.052626822143793106,
-0.028963571414351463,
-0.09293573349714279,
0.08914027363061905,
-0.17447681725025177,
0.00542455492541194,
0.010723855346441269,
-0.09667026251554489,
0.031412798911333084,
-0.026741521432995796,
0.22572150826454163,
-0.047204889357089996,
0.0007292676600627601,
0.05577582120895386,
-0.15885306894779205,
0.010651380755007267,
0.11780877411365509,
-0.019018907099962234,
-0.08953067660331726,
0.0395481213927269,
0.05219845473766327,
-0.020764293149113655,
-0.2175045907497406,
-0.012331885285675526,
-0.044038668274879456,
0.043852243572473526,
-0.01421824749559164,
0.035063184797763824,
-0.011926013045012951,
0.0796186700463295,
0.023279258981347084,
-0.02834441512823105,
-0.06037428230047226,
0.03946256265044212,
0.12701037526130676,
0.01032868679612875,
0.12794065475463867,
-0.039714861661195755,
-0.06480121612548828,
-0.004506916273385286,
0.16366282105445862,
0.08593833446502686,
0.05094882473349571,
-0.042986687272787094,
0.107514888048172,
0.24572771787643433,
0.18366871774196625,
0.08986043184995651,
-0.10545051097869873,
-0.04487120360136032,
0.00901670753955841,
-0.003082054201513529,
-0.06449858844280243,
-0.00745055265724659,
0.05565797537565231,
-0.03685824200510979,
-0.0539705716073513,
-0.04055612161755562,
0.0651995837688446,
0.26984018087387085,
0.05295487493276596,
-0.08887216448783875,
-0.059579409658908844,
0.022923612967133522,
-0.10773938149213791,
-0.07785290479660034,
0.08493836224079132,
0.0694679543375969,
-0.13517938554286957,
0.040692288428545,
0.0013248092727735639,
0.1305154263973236,
-0.0833408311009407,
0.04746183753013611,
-0.061884548515081406,
-0.17557930946350098,
0.0019094495801255107,
0.04468317702412605,
-0.35599851608276367,
0.1936044842004776,
0.02740686759352684,
0.014569350518286228,
-0.048328399658203125,
-0.0386180616915226,
-0.030864937230944633,
0.06424128264188766,
0.07447896897792816,
-0.014569185674190521,
0.019251344725489616,
-0.16661541163921356,
-0.003962242044508457,
0.13132227957248688,
0.08753053098917007,
0.11605768650770187,
0.04947018623352051,
0.01994915120303631,
0.043776985257864,
-0.016386453062295914,
0.026884278282523155,
-0.17633582651615143,
-0.04171985387802124,
-0.014818855561316013,
0.05285300314426422,
0.05355542153120041,
-0.027220021933317184,
-0.0333956778049469,
-0.1514870673418045,
0.20280371606349945,
-0.09265882521867752,
-0.08630526065826416,
-0.06588874012231827,
0.04145391285419464,
0.05577021464705467,
-0.07201692461967468,
0.006211943458765745,
-0.08010726422071457,
0.003552970476448536,
-0.03617620840668678,
-0.026851218193769455,
0.07034933567047119,
-0.03701596334576607,
-0.042749661952257156,
0.01571037992835045,
0.09781044721603394,
-0.012063423171639442,
0.042371489107608795,
0.10177825391292572,
-0.049307435750961304,
0.05898231267929077,
-0.06590750813484192,
-0.09036316722631454,
-0.05273769795894623,
-0.024851912632584572,
0.0013355747796595097,
-0.14771437644958496,
0.04200868308544159,
-0.11561119556427002,
-0.05811868607997894,
0.2311701476573944,
0.11219474673271179,
-0.016904378309845924,
0.06779978424310684,
0.16072030365467072,
-0.01753264293074608,
-0.15013708174228668,
-0.04180161654949188,
0.027183644473552704,
0.06909003108739853,
-0.10863517224788666,
-0.05449000746011734,
0.12061293423175812,
0.0395980142056942,
0.002950432011857629,
-0.05138343945145607,
-0.026284996420145035,
-0.1056867390871048,
0.15701867640018463,
0.045175157487392426,
0.25717586278915405,
-0.1307990401983261,
-0.04570832848548889,
0.017102530226111412,
-0.19911222159862518,
0.050476446747779846,
-0.0490991473197937,
0.09965912252664566,
-0.019501157104969025,
0.05863332375884056,
0.003542788792401552,
-0.006574058439582586,
0.13699792325496674,
0.030392050743103027,
-0.0068506584502756596,
-0.038833048194646835,
-0.013676468282938004,
-0.03950118273496628,
0.049086377024650574,
0.09381680935621262,
-0.06642710417509079,
0.03524613752961159,
-0.17026104032993317,
-0.042268164455890656,
-0.08341963589191437,
-0.03833679109811783,
-0.006484251469373703,
-0.03466956689953804,
-0.00757422624155879,
0.016750382259488106,
-0.00006868499622214586,
0.0265926793217659,
0.036849405616521835,
-0.10556686669588089,
0.08399336040019989,
0.07824179530143738,
0.1314833015203476,
-0.0791393592953682,
0.06423810124397278,
-0.02870943583548069,
-0.05136486142873764,
0.12817710638046265,
-0.11767442524433136,
0.07274977117776871,
0.10611160099506378,
-0.02992124669253826,
0.12441401183605194,
0.05761834233999252,
-0.0010352034587413073,
0.08669773489236832,
0.0361921526491642,
-0.05368182808160782,
-0.0998542457818985,
0.06064373999834061,
-0.14486397802829742,
0.012705570086836815,
0.013597175478935242,
0.10576944798231125,
-0.030044300481677055,
-0.036195188760757446,
-0.004877607338130474,
-0.0434148833155632,
-0.12402024120092392,
0.09528705477714539,
0.04640316590666771,
0.054883625358343124,
-0.0788234993815422,
0.08180250227451324,
0.012571301311254501,
-0.14096565544605255,
0.038974110037088394,
-0.024073977023363113,
-0.14537085592746735,
-0.06095457449555397,
0.009886907413601875,
0.15142174065113068,
0.008817890658974648,
-0.09781042486429214,
-0.0879732221364975,
-0.13647355139255524,
0.03030356392264366,
0.18180759251117706,
0.07364580035209656,
0.031174365431070328,
-0.0066324202343821526,
-0.0409308560192585,
0.00490835215896368,
0.0088111637160182,
0.06953863799571991,
0.06330914050340652,
-0.05223161727190018,
-0.028732815757393837,
0.013630557805299759,
0.16379402577877045,
-0.03003464825451374,
-0.05066323280334473,
-0.1480221450328827,
0.07852315157651901,
-0.27400147914886475,
0.057197097688913345,
-0.020878145471215248,
-0.009842921048402786,
0.019949687644839287,
-0.14441467821598053,
-0.011761341243982315,
-0.0016655653016641736,
-0.07349279522895813,
0.0420280396938324,
-0.024533936753869057,
0.014645167626440525,
-0.06412151455879211,
-0.016218110918998718,
0.11920624226331711,
-0.05828028917312622,
0.07195734977722168,
0.059562068432569504,
-0.12248484045267105,
0.12224572151899338,
-0.14915388822555542,
-0.08088691532611847,
0.030361182987689972,
0.024908972904086113,
0.051628150045871735,
-0.008987760171294212,
0.047545671463012695,
0.08131899684667587,
0.09859823435544968,
0.052370380610227585,
0.26106008887290955,
-0.07147371768951416,
-0.09288308769464493,
-0.05241633579134941,
-0.052182842046022415,
-0.06773081421852112,
-0.0337803028523922,
0.15526439249515533,
0.09518890827894211,
0.08460675179958344,
-0.08966857939958572,
0.047268375754356384,
-0.05966488644480705,
0.00014112664212007076,
0.0035483238752931356,
-0.11332836002111435,
0.04536999762058258,
-0.1082926020026207,
0.006318346597254276,
-0.02936708740890026,
0.1632632315158844,
-0.12869977951049805,
0.07350388169288635,
0.004540655296295881,
-0.09246598929166794,
-0.007247034925967455,
0.047531209886074066,
0.29755833745002747,
0.06091389060020447,
-0.020710976794362068,
-0.09741367399692535,
-0.0158374160528183,
-0.023460695520043373,
0.09144476801156998,
-0.01617656648159027,
0.25702741742134094,
-0.12018223851919174,
0.091916523873806,
0.04538395628333092,
0.11546105891466141,
0.012953347526490688,
-0.09704096615314484,
-0.06032553315162659,
0.03290780633687973,
0.004971497226506472,
0.07464764267206192,
0.1770961582660675,
-0.12137804180383682,
0.049524445086717606,
-0.04980526864528656,
-0.10486865788698196,
-0.12303828448057175,
-0.016258394345641136,
-0.11113693565130234,
-0.09568066895008087,
0.06905017793178558,
-0.0834018662571907,
-0.02945883385837078,
0.1128176674246788,
0.12425302714109421,
0.010184187442064285,
0.21397832036018372,
0.017965180799365044,
-0.053142763674259186,
0.0841996893286705,
-0.05284390226006508,
0.021736474707722664,
0.05740226060152054,
-0.0064533306285738945,
0.01033822912722826,
-0.007589591667056084,
0.004821484908461571,
0.02983379177749157,
-0.01385031919926405,
-0.029052264988422394,
-0.12623687088489532,
-0.04562069848179817,
-0.02212993986904621,
0.012738210149109364,
0.03948032483458519,
0.1674480438232422,
0.04814209043979645,
0.001848279032856226,
0.0001935918553499505,
0.0728079155087471,
-0.005344010889530182,
-0.20429293811321259,
-0.23878084123134613,
0.10604707896709442,
-0.006744184996932745,
0.052729591727256775,
0.007015070877969265,
-0.006986464373767376,
-0.030306026339530945,
0.3716488480567932,
0.1428333818912506,
-0.040359850972890854,
0.052692096680402756,
0.06905472278594971,
0.03630049154162407,
-0.0007867027306929231,
0.0779048427939415,
0.08946917951107025,
0.03956504538655281,
-0.08475612103939056,
-0.09643252193927765,
-0.05274728685617447,
-0.0984383150935173,
-0.09232279658317566,
0.07873864471912384,
0.04941510036587715,
-0.013116061687469482,
-0.08896270394325256,
0.04901362955570221,
-0.05826418474316597,
0.007086563855409622,
-0.025821641087532043,
-0.09523569047451019,
-0.11178514361381531,
-0.049301113933324814,
-0.07537784427404404,
0.017423095181584358,
-0.006117271725088358,
-0.035351939499378204,
0.034588523209095,
0.019433531910181046,
0.007967256009578705,
-0.012481644749641418,
-0.023721983656287193,
0.10114546865224838,
0.039649102836847305,
-0.07283630967140198,
0.09100867062807083,
0.160472109913826,
0.06674570590257645,
0.09566760808229446,
0.047359321266412735,
0.11635775119066238,
0.03726443648338318,
0.07185497134923935,
-0.060663122683763504,
0.08749471604824066,
0.01742054894566536,
0.050171349197626114,
0.050773076713085175,
-0.1146550178527832,
0.0867956355214119,
-0.054710566997528076,
-0.05034667253494263,
-0.14477795362472534,
0.08838773518800735,
-0.06385026127099991,
0.09168974310159683,
0.17850570380687714,
0.0014499584212899208,
0.026995358988642693,
-0.06059662252664566,
0.07198817282915115,
0.00024731268058530986,
-0.1293184757232666,
-0.1126694604754448,
-0.16501551866531372,
0.012731141410768032,
0.02662409469485283,
-0.02514851652085781,
-0.25004515051841736,
-0.03103766031563282,
0.0007452722056768835,
0.022292297333478928,
-0.03162538260221481,
0.07982040196657181,
0.08766624331474304,
0.03677806630730629,
-0.009157705120742321,
-0.2961367964744568,
0.04181364178657532,
0.04439375177025795,
-0.08398659527301788,
-0.09793419390916824
] |
null | null |
transformers
|
# Finetuned `bert-base-multilingual-cased` model on Thai sequence and token classification datasets
<br>
Finetuned multilingual BERT (mBERT) model on Thai sequence and token classification datasets
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
We use the pretrained cross-lingual BERT model (mBERT) as proposed by [[Devlin et al., 2018]](https://arxiv.org/abs/1810.04805). We download the pretrained PyTorch model via HuggingFace's Model Hub (https://huggingface.co/bert-base-multilignual-cased)
<br>
## Intended uses & limitations
<br>
You can use the finetuned models for multiclass/multilabel text classification and token classification task.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reviews`
Users' review rating classification task (scale ranging from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale ranging from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The example notebook demonstrating how to use the finetuned models for inference can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
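A minimal usage sketch with the `transformers` pipeline is shown below; the checkpoint name is a placeholder for whichever finetuned sequence classification model you trained or downloaded, and the Thai sentence is only an illustrative input (neither is taken from the original documentation):
```python
from transformers import AutoTokenizer, AutoModelForSequenceClassification, pipeline

# Placeholder: point this at a finetuned checkpoint directory or Hub repository,
# e.g. one produced by the thai2transformers finetuning scripts.
checkpoint = "path/to/finetuned-sequence-classification-checkpoint"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)

classifier = pipeline("text-classification", model=model, tokenizer=tokenizer)

# Example wisesight_sentiment-style input: "the food is delicious, excellent service".
print(classifier("อาหารอร่อยมาก บริการดีเยี่ยม"))
```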
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{}
|
fill-mask
|
airesearch/bert-base-multilingual-cased-finetuned
|
[
"transformers",
"bert",
"fill-mask",
"arxiv:1810.04805",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1810.04805",
"2101.09635"
] |
[] |
TAGS
#transformers #bert #fill-mask #arxiv-1810.04805 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# Finetuned 'bert-base-multilingual-cased' model on Thai sequence and token classification datasets
<br>
Finetuned multilingual BERT (mBERT) model on Thai sequence and token classification datasets
The script and documentation can be found at this repository.
<br>
## Model description
<br>
We use the pretrained cross-lingual BERT model (mBERT) as proposed by [[Devlin et al., 2018]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL
<br>
## Intended uses & limitations
<br>
You can use the finetuned models for multiclass/multilabel text classification and token classification task.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reviews'
Users' review rating classification task (scale ranging from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale ranging from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The example notebook demonstrating how to use the finetuned models for inference can be found at this Colab notebook
<br>
BibTeX entry and citation info
|
[
"# Finetuend 'bert-base-multilignual-cased' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>",
"## Model description\n\n<br>\n\nWe use the pretrained cross-lingual BERT model (mBERT) as proposed by [[Devlin et al., 2018]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook\n\n<br>\n\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #bert #fill-mask #arxiv-1810.04805 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# Finetuend 'bert-base-multilignual-cased' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>",
"## Model description\n\n<br>\n\nWe use the pretrained cross-lingual BERT model (mBERT) as proposed by [[Devlin et al., 2018]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook\n\n<br>\n\n\nBibTeX entry and citation info"
] |
[
50,
73,
63,
280,
42
] |
[
"passage: TAGS\n#transformers #bert #fill-mask #arxiv-1810.04805 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# Finetuend 'bert-base-multilignual-cased' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>## Model description\n\n<br>\n\nWe use the pretrained cross-lingual BERT model (mBERT) as proposed by [[Devlin et al., 2018]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>"
] |
[
0.01973884180188179,
0.03747386112809181,
-0.006810711696743965,
0.03847236931324005,
0.13413040339946747,
0.008826037868857384,
0.13902384042739868,
0.05721278116106987,
-0.0032545181456953287,
0.0878702774643898,
-0.07230349630117416,
-0.022790387272834778,
0.07942900061607361,
0.12261626869440079,
0.003908894490450621,
-0.24048921465873718,
0.05173255503177643,
-0.03468870371580124,
0.06088270619511604,
0.10817516595125198,
0.10362689197063446,
-0.07822856307029724,
0.11365898698568344,
0.04728800430893898,
-0.05366497486829758,
0.03189034387469292,
0.003646383062005043,
-0.06459230184555054,
0.04737016558647156,
0.030330533161759377,
0.15016978979110718,
0.051803283393383026,
0.015739580616354942,
-0.13661044836044312,
0.021597299724817276,
0.058441925793886185,
0.009274384938180447,
0.030588096007704735,
0.14677907526493073,
-0.0830044373869896,
0.07616756111383438,
-0.06704281270503998,
0.12210175395011902,
0.06981360167264938,
-0.10205795615911484,
-0.06971199065446854,
-0.12340761721134186,
0.14886142313480377,
0.11854029446840286,
0.009159078821539879,
-0.06390659511089325,
0.07321444898843765,
-0.13404470682144165,
0.04648903012275696,
0.006214330904185772,
-0.21334509551525116,
-0.018201952800154686,
0.08961115032434464,
-0.03502275049686432,
0.07309519499540329,
-0.08521212637424469,
-0.00643151206895709,
0.013766360469162464,
0.0019225666765123606,
0.027892984449863434,
-0.04321788623929024,
-0.001601335359737277,
-0.05912426486611366,
-0.10564948618412018,
0.020938055589795113,
0.12810499966144562,
0.050903335213661194,
-0.09813860058784485,
-0.13418248295783997,
-0.03577777370810509,
0.002710501430556178,
0.004585998132824898,
-0.028046300634741783,
-0.01422971487045288,
0.016438394784927368,
0.06395553052425385,
-0.02084447257220745,
-0.07635115832090378,
-0.0136931287124753,
-0.07123620063066483,
0.015240847133100033,
0.01717447303235531,
-0.0025838827714323997,
-0.08003541827201843,
0.09087470173835754,
0.01907368004322052,
-0.0538877509534359,
0.010376997292041779,
-0.058863550424575806,
-0.1800820231437683,
-0.0656541958451271,
0.02458583377301693,
-0.12477491050958633,
0.0048325578682124615,
0.16499696671962738,
-0.039715010672807693,
0.12086569517850876,
-0.1244310736656189,
-0.007872235029935837,
0.0864938423037529,
0.1632358580827713,
-0.06746294349431992,
0.016013534739613533,
-0.029547953978180885,
-0.015913762152194977,
-0.019016548991203308,
-0.006156079471111298,
0.018416954204440117,
-0.029499150812625885,
0.06787410378456116,
0.03749478980898857,
-0.029037972912192345,
0.11024987697601318,
-0.09118331968784332,
-0.06722810119390488,
0.2484252154827118,
-0.11822515726089478,
-0.007756668608635664,
0.0666659027338028,
-0.04470950365066528,
0.09354835748672485,
0.029228441417217255,
0.0032765204086899757,
-0.03790387138724327,
0.15794944763183594,
-0.08211082220077515,
-0.004853897262364626,
-0.018370138481259346,
-0.10069665312767029,
0.02412555366754532,
-0.07951381057500839,
-0.10305343568325043,
-0.054128795862197876,
-0.1103539690375328,
-0.0548136904835701,
-0.036421388387680054,
-0.04896525293588638,
-0.007346694357693195,
0.012591359205543995,
0.03835192322731018,
-0.00404205359518528,
0.010966372676193714,
-0.06995417922735214,
-0.01734478771686554,
0.03266121819615364,
-0.04347139969468117,
0.06014706566929817,
0.007560586091130972,
0.04842343553900719,
-0.11648377776145935,
0.035019438713788986,
-0.2980114221572876,
0.1665310263633728,
-0.10013182461261749,
0.03731430321931839,
-0.12982743978500366,
-0.026159735396504402,
-0.010605025105178356,
-0.015022208914160728,
-0.04800448939204216,
0.12183055281639099,
-0.08868225663900375,
-0.04440733790397644,
0.15311948955059052,
-0.1323779970407486,
0.015245389193296432,
0.13479821383953094,
-0.04468206688761711,
0.0010905783856287599,
0.12181422859430313,
0.1621352583169937,
0.2056882679462433,
0.01243507955223322,
-0.024715738371014595,
-0.028195695951581,
-0.055861614644527435,
0.1416206955909729,
0.09552918374538422,
-0.04112335667014122,
-0.012428846210241318,
0.01689876988530159,
-0.051046743988990784,
-0.016117915511131287,
0.0006218920461833477,
-0.013086442835628986,
0.02529028244316578,
0.0028768726624548435,
0.104308120906353,
-0.048361461609601974,
0.03266724944114685,
-0.007230543531477451,
-0.039023153483867645,
0.09437074512243271,
0.08133827894926071,
-0.02481606788933277,
0.005559565499424934,
-0.129927858710289,
0.0070410119369626045,
0.009787446819245815,
-0.009459373541176319,
-0.20783846080303192,
-0.0903208926320076,
0.08559960871934891,
-0.11741425096988678,
0.1310291290283203,
0.027541616931557655,
-0.014218946918845177,
-0.022101424634456635,
-0.06536883860826492,
-0.012287542223930359,
0.003031899454072118,
-0.04769086837768555,
0.00009688583668321371,
-0.1000962033867836,
0.008773843757808208,
-0.033768974244594574,
0.13089345395565033,
-0.14120601117610931,
0.027133727446198463,
0.1639135628938675,
0.11349943280220032,
0.04710279032588005,
-0.023917142301797867,
0.093487448990345,
0.0205271914601326,
0.03200192004442215,
-0.02079218067228794,
0.00040843882015906274,
-0.007055259775370359,
-0.0839390829205513,
0.09318424761295319,
-0.10148827731609344,
-0.07128269970417023,
0.029675785452127457,
0.04450882971286774,
-0.10254970192909241,
0.039753299206495285,
-0.005131205543875694,
-0.025735953822731972,
-0.12894974648952484,
0.006341206841170788,
0.1653142124414444,
0.025538617745041847,
0.10533827543258667,
-0.0685899406671524,
-0.024292994290590286,
-0.0353483110666275,
-0.0038607323076575994,
-0.012155148200690746,
0.0984664335846901,
-0.015849480405449867,
-0.2981550693511963,
0.07465708255767822,
-0.006100220140069723,
0.039911460131406784,
0.15438610315322876,
0.0012964482884854078,
-0.004455607384443283,
-0.008584407158195972,
0.06363195925951004,
-0.03482258692383766,
-0.05227719992399216,
-0.01765739545226097,
0.006202494725584984,
0.030982360243797302,
0.03563752397894859,
-0.011631145142018795,
-0.051732953637838364,
0.05658306926488876,
0.000021104842744534835,
-0.038676582276821136,
-0.10728884488344193,
0.07117640227079391,
0.08045291155576706,
0.09113943576812744,
0.037867795675992966,
0.11956445872783661,
-0.0031380588188767433,
-0.019389592111110687,
-0.13295239210128784,
0.14822565019130707,
-0.09285278618335724,
-0.2855008840560913,
-0.15014050900936127,
0.017080433666706085,
-0.10121849924325943,
-0.019106367602944374,
-0.021715378388762474,
-0.10917367786169052,
-0.07042468339204788,
-0.06529753655195236,
0.09915918856859207,
-0.03880513459444046,
-0.03171840310096741,
-0.06890824437141418,
-0.0029215128161013126,
0.023216741159558296,
-0.07601134479045868,
0.008601286448538303,
0.006439937744289637,
-0.08670835942029953,
0.029160037636756897,
-0.05239780619740486,
0.001264727208763361,
0.1404096633195877,
-0.03502099961042404,
-0.02100999839603901,
-0.04166547209024429,
0.15410228073596954,
-0.07917825132608414,
0.08589966595172882,
0.0836399719119072,
-0.0670962706208229,
0.07259412854909897,
0.15244700014591217,
0.03581231087446213,
-0.02734781987965107,
0.0967462956905365,
0.12492503225803375,
-0.009717321023344994,
-0.29166051745414734,
-0.08544500917196274,
-0.012105610221624374,
0.025371462106704712,
0.047499340027570724,
0.06384884566068649,
0.08697480708360672,
0.031757280230522156,
-0.06763120740652084,
0.01536203920841217,
0.0900593101978302,
0.05040601268410683,
0.11663731932640076,
0.04885111004114151,
0.08709591627120972,
-0.11991573125123978,
-0.01869383454322815,
0.10856316238641739,
0.010642889887094498,
0.14012734591960907,
0.04735646769404411,
0.0968373641371727,
0.08687349408864975,
0.01945585198700428,
0.11434564739465714,
0.011547038331627846,
-0.03089972212910652,
0.03465691953897476,
-0.01801874302327633,
-0.025694729760289192,
0.025797653943300247,
-0.011746161617338657,
0.11265891790390015,
-0.061377156525850296,
0.0237625353038311,
-0.06385748088359833,
0.09383045136928558,
0.214312344789505,
-0.005055722314864397,
-0.1191922202706337,
-0.039529211819171906,
0.07293514162302017,
-0.06194737181067467,
-0.0493316613137722,
-0.012788931839168072,
0.021396422758698463,
-0.14022104442119598,
0.18250691890716553,
-0.011823557317256927,
0.11713548749685287,
-0.165800541639328,
-0.024805400520563126,
-0.022089725360274315,
-0.03142532333731651,
-0.03474852815270424,
0.04906446486711502,
-0.1436343938112259,
0.13960157334804535,
0.03451293334364891,
-0.01875349134206772,
-0.021831927821040154,
0.003174322424456477,
0.021439632400870323,
0.12806594371795654,
0.08318015187978745,
0.007021673489362001,
0.013667385093867779,
-0.06891491264104843,
-0.036808814853429794,
-0.02074875310063362,
0.087021604180336,
-0.11205170303583145,
0.033350732177495956,
-0.021067222580313683,
0.009837555699050426,
-0.05925504118204117,
-0.07515626400709152,
-0.16156649589538574,
-0.07222834974527359,
0.02231692522764206,
-0.0469357855618,
-0.0019014987628906965,
0.009787238202989101,
-0.03931015729904175,
-0.08440589904785156,
0.12281773239374161,
-0.14974118769168854,
-0.07123634964227676,
-0.13321244716644287,
-0.006715184077620506,
0.07079237699508667,
-0.08417030423879623,
0.00601443275809288,
-0.07768697291612625,
0.10119494795799255,
-0.008093024604022503,
-0.07990048080682755,
0.037459831684827805,
-0.0013696816749870777,
-0.13987617194652557,
-0.0013576139463111758,
0.09190385788679123,
0.11657409369945526,
0.014501707628369331,
-0.009959442541003227,
0.006274668499827385,
0.08190211653709412,
-0.12784011662006378,
-0.052203163504600525,
0.08642333745956421,
-0.028998197987675667,
0.14930501580238342,
-0.043684061616659164,
-0.2562909722328186,
-0.12280313670635223,
-0.018206948414444923,
0.046324003487825394,
0.12195558100938797,
-0.0384107269346714,
0.10921534895896912,
0.14171643555164337,
-0.06112120673060417,
-0.16749027371406555,
0.034446533769369125,
0.025288129225373268,
-0.001785891130566597,
0.07010555267333984,
-0.16678835451602936,
0.09259328991174698,
0.055690668523311615,
-0.009667942300438881,
-0.14601199328899384,
-0.162592813372612,
-0.13691408932209015,
0.03797868266701698,
-0.000056222332204924896,
-0.021202387288212776,
-0.113515205681324,
-0.10420285910367966,
-0.05549079179763794,
-0.06985116750001907,
0.162297323346138,
-0.024127988144755363,
0.03874918073415756,
0.01989181898534298,
0.031161583960056305,
0.005785924848169088,
0.01206235121935606,
0.10950586944818497,
0.08277566730976105,
0.06925373524427414,
-0.04836045950651169,
-0.052613064646720886,
0.12389527261257172,
-0.01016561035066843,
0.12311659008264542,
0.045953378081321716,
0.05278484523296356,
-0.17121827602386475,
-0.058496274054050446,
-0.07767488807439804,
-0.0003551691770553589,
-0.019614486023783684,
-0.012887217104434967,
-0.13024991750717163,
0.08848003298044205,
0.02434857003390789,
-0.014152981340885162,
0.1002165675163269,
-0.09074416756629944,
0.017710737884044647,
-0.004459180403500795,
0.14501875638961792,
0.13367493450641632,
-0.09514006972312927,
-0.07581525295972824,
-0.029162533581256866,
0.055993203073740005,
-0.2001083642244339,
0.08208893984556198,
0.07644543051719666,
0.03123546577990055,
0.18675735592842102,
-0.012791551649570465,
-0.09658289700746536,
0.04407132789492607,
0.08577883988618851,
-0.1005435585975647,
-0.1586104929447174,
0.02811165526509285,
0.0008682052721269429,
-0.045654311776161194,
0.04870633780956268,
0.10815645009279251,
0.0007725761970505118,
-0.04784427583217621,
0.04967082664370537,
0.0424896664917469,
-0.03332766517996788,
0.06326644867658615,
-0.00903383083641529,
0.060544174164533615,
-0.07686205953359604,
0.17054425179958344,
0.20097050070762634,
-0.07604319602251053,
-0.04465627670288086,
0.10987559705972672,
-0.14986597001552582,
-0.04271896183490753,
-0.08874938637018204,
0.08874235302209854,
0.014714445918798447,
-0.06327071040868759,
0.001546576269902289,
-0.11245256662368774,
0.025953497737646103,
0.22931577265262604,
0.03260141983628273,
0.07754107564687729,
-0.06315405666828156,
-0.03719410300254822,
0.006608362775295973,
0.018435101956129074,
0.05503750219941139,
0.024585098028182983,
-0.12181439995765686,
0.0228696558624506,
0.10082169622182846,
0.02060801163315773,
-0.023547431454062462,
-0.0642470270395279,
-0.09413829445838928,
-0.0028710716869682074,
-0.013915237039327621,
0.03836842253804207,
-0.04721041023731232,
0.02373517118394375,
-0.029727891087532043,
-0.051696088165044785,
-0.03031446598470211,
-0.021851856261491776,
-0.041523631662130356,
-0.012295428663492203,
-0.01164985541254282,
0.15767401456832886,
-0.1792425513267517,
0.001524482504464686,
0.09915360063314438,
-0.055188510566949844,
0.10622632503509521,
-0.004552349913865328,
-0.05405951663851738,
0.048627134412527084,
-0.11889861524105072,
0.014275521971285343,
0.03141351416707039,
0.006756911985576153,
-0.02611483633518219,
-0.11648926138877869,
0.007169513497501612,
-0.036567527800798416,
0.04546412080526352,
0.04388350248336792,
0.08350183069705963,
-0.07716014236211777,
0.051772717386484146,
-0.06897546350955963,
-0.0682053342461586,
-0.0625060424208641,
0.03518298268318176,
0.10598281025886536,
0.05329137668013573,
0.08164004981517792,
-0.05109057202935219,
-0.0027996134012937546,
-0.11221863329410553,
-0.017720453441143036,
0.008477459661662579,
-0.03767336905002594,
-0.07524148374795914,
-0.02280856855213642,
0.026602808386087418,
-0.02184171974658966,
0.12098929286003113,
-0.0335930772125721,
-0.00811722967773676,
0.07491178065538406,
0.12980090081691742,
-0.14537151157855988,
0.036619026213884354,
0.054795775562524796,
-0.0041527701541781425,
-0.028922058641910553,
0.05395950749516487,
-0.0640583336353302,
-0.10058747231960297,
0.028407858684659004,
0.12187404185533524,
0.1847643107175827,
-0.1055106446146965,
-0.03301233425736427,
0.05915530025959015,
0.013351654633879662,
-0.06553961336612701,
0.02812156453728676,
-0.13399791717529297,
0.0004938435740768909,
-0.06463275849819183,
0.06560371816158295,
0.1459743231534958,
-0.09362123906612396,
0.09493490308523178,
0.04825827106833458,
-0.08007290214300156,
-0.08003472536802292,
-0.17660105228424072,
-0.08151695877313614,
-0.040077172219753265,
0.003009065752848983,
-0.08562590926885605,
0.03795695677399635,
0.15598064661026,
0.07460758835077286,
-0.026935391128063202,
0.1618087738752365,
-0.15294353663921356,
-0.0904473140835762,
0.1046295017004013,
0.01420530118048191,
-0.017804518342018127,
0.042537059634923935,
0.054476696997880936,
0.023917069658637047,
0.12970682978630066,
0.03903220221400261,
0.024615544825792313,
0.018507514148950577,
0.008215394802391529,
-0.07482925057411194,
-0.06390855461359024,
0.020137988030910492,
-0.01457787211984396,
-0.040778741240501404,
0.14274804294109344,
0.04187554121017456,
-0.011895131319761276,
-0.016321860253810883,
0.15362103283405304,
-0.0018049072241410613,
0.002344416920095682,
-0.19054165482521057,
0.1682918220758438,
0.01077274326235056,
0.014487450011074543,
0.061584390699863434,
-0.11070483922958374,
-0.040483083575963974,
0.10568682849407196,
0.10856649279594421,
0.051653459668159485,
0.01704341359436512,
-0.032980285584926605,
0.02988329902291298,
0.06519708037376404,
0.10143031179904938,
-0.055669233202934265,
0.09477993100881577,
-0.0056171659380197525,
0.1000317707657814,
-0.03012046404182911,
-0.04211979731917381,
0.0346665233373642,
0.06406079977750778,
-0.003545250976458192,
0.02604660764336586,
-0.10288766771554947,
0.17236053943634033,
-0.11628220230340958,
-0.16561788320541382,
0.007039907854050398,
-0.06784667819738388,
-0.10181199014186859,
-0.024508411064743996,
-0.06987444311380386,
-0.004881398286670446,
0.0030543578322976828,
0.03785634785890579,
-0.0017513190396130085,
0.04659002646803856,
0.05358278006315231,
-0.06215610355138779,
-0.09597016870975494,
0.08543892949819565,
0.021688099950551987,
0.12123152613639832,
0.03449162095785141,
0.05752523988485336,
0.06897572427988052,
-0.023472899571061134,
-0.04339137300848961,
0.05773046612739563,
-0.010319873690605164,
0.043238334357738495,
-0.040216173976659775,
0.13929317891597748,
0.027433408424258232,
0.09510890394449234,
0.06509196013212204,
-0.115141361951828,
0.05632970854640007,
-0.061647359281778336,
0.006882125046104193,
-0.16502541303634644,
0.1231946274638176,
-0.10622970014810562,
0.08068542182445526,
0.17070026695728302,
0.0015640108613297343,
0.00837548729032278,
-0.027967864647507668,
0.01220750156790018,
-0.02886342629790306,
0.0411955863237381,
-0.029178863391280174,
-0.15280671417713165,
0.04380108043551445,
-0.09065862745046616,
0.09080002456903458,
-0.22536788880825043,
0.005343299824744463,
-0.017368989065289497,
0.030136747285723686,
-0.033304739743471146,
0.12928399443626404,
-0.07162775099277496,
0.021449729800224304,
-0.00424589030444622,
-0.3281933069229126,
0.03421168774366379,
0.09646645188331604,
-0.11437516659498215,
-0.020090777426958084
] |
null | null |
transformers
|
# WangchanBERTa base model: `wangchanberta-base-att-spm-uncased`
<br>
Pretrained RoBERTa BASE model on assorted Thai texts (78.5 GB).
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692).
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reviews` 
Users' review rating classification task (scale is ranging from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
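As a minimal usage sketch (not part of the original card), the pretrained checkpoint can be queried for masked-token prediction with the `transformers` fill-mask pipeline; the example sentence is adapted from this model's hosted inference widget.

```python
from transformers import pipeline

# Load the pretrained checkpoint for masked language modeling.
fill_mask = pipeline(
    task="fill-mask",
    model="airesearch/wangchanberta-base-att-spm-uncased",
    tokenizer="airesearch/wangchanberta-base-att-spm-uncased",
)

# The mask token for this SentencePiece-based tokenizer is <mask>.
print(fill_mask("ผู้ใช้งานท่าอากาศยานนานาชาติ<mask>มีกว่าสามล้านคน"))
```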
<br>
## Training data
`wangchanberta-base-att-spm-uncased` model was pretrained on assorted Thai text dataset. The total size of uncompressed text is 78.5GB.
### Preprocessing
Texts are preprocessed with the following rules (a short illustrative sketch of a few of these rules follows the list):
- Replace HTML forms of characters with the actual characters, such as &nbsp; with a space and <br /> with a line break [[Howard and Ruder, 2018]](https://arxiv.org/abs/1801.06146).
- Remove empty brackets ((), {}, and []) that sometimes come up as a result of text extraction such as from Wikipedia.
- Replace line breaks with spaces.
- Replace more than one space with a single space.
- Remove more than 3 repetitive characters, such as ดีมากกก to ดีมาก [[Howard and Ruder, 2018]](https://arxiv.org/abs/1801.06146).
- Word-level tokenization using [[Phatthiyaphaibun et al., 2020]](https://zenodo.org/record/4319685#.YA4xEGQzaDU)’s `newmm` dictionary-based maximal matching tokenizer.
- Replace repetitive words; this is done post-tokenization, unlike [[Howard and Ruder, 2018]](https://arxiv.org/abs/1801.06146), since there is no delimitation by space in Thai as in English.
- Replace spaces with <_>. The SentencePiece tokenizer combines the spaces with other tokens. Since spaces serve as punctuation in Thai such as sentence boundaries similar to periods in English, combining them with other tokens would omit an important feature for tasks such as word tokenization and sentence breaking. Therefore, we opt to explicitly mark spaces with <_>.
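The snippet below is a rough, partial sketch of how a few of the rules above could be expressed in Python; it is illustrative only and is not the project's actual preprocessing script (the function name and exact regular expressions are assumptions).

```python
import re

def clean_text(text: str) -> str:
    """Illustrative partial implementation of the cleaning rules listed above."""
    # Replace HTML forms of characters with the actual characters.
    text = text.replace("&nbsp;", " ").replace("<br />", "\n")
    # Remove empty brackets left over from text extraction.
    text = re.sub(r"\(\)|\{\}|\[\]", "", text)
    # Replace line breaks with spaces and collapse repeated spaces.
    text = text.replace("\n", " ")
    text = re.sub(r" {2,}", " ", text)
    # Collapse runs of 3 or more identical characters (e.g. ดีมากกก -> ดีมาก).
    text = re.sub(r"(.)\1{2,}", r"\1", text)
    return text
```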
<br>
Regarding the vocabulary, we use SentencePiece [[Kudo, 2018]](https://arxiv.org/abs/1808.06226) to train SentencePiece unigram model.
The tokenizer has a vocabulary size of 25,000 subwords, trained on 15M sentences sampled from the training set.
The length of each sequence is limited up to 416 subword tokens.
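For illustration, a unigram SentencePiece model with the stated vocabulary size could be trained roughly as follows; this is a sketch, not the authors' actual training command, and the input file name is a placeholder for the 15M sampled sentences.

```python
import sentencepiece as spm

# Train a 25,000-subword unigram model, mirroring the setup described above.
# "train_sentences.txt" is a placeholder for the sampled training sentences.
spm.SentencePieceTrainer.train(
    input="train_sentences.txt",
    model_prefix="wangchanberta_spm",
    vocab_size=25000,
    model_type="unigram",
)
```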
Regarding the masking procedure, for each sequence, we sampled 15% of the tokens and replaced them with the <mask> token. Out of the 15%, 80% is replaced with a <mask> token, 10% is left unchanged, and 10% is replaced with a random token.
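A schematic sketch of this 15% / 80-10-10 masking rule is shown below; it is illustrative pseudologic rather than the exact routine used in the training code.

```python
import random

def mask_tokens(token_ids, mask_id, vocab_size, mlm_prob=0.15):
    """Apply the 15% selection and 80/10/10 replacement rule described above."""
    labels = list(token_ids)  # keep the original ids as prediction targets
    for i in range(len(token_ids)):
        if random.random() < mlm_prob:        # select ~15% of the tokens
            r = random.random()
            if r < 0.8:                        # 80%: replace with the <mask> token
                token_ids[i] = mask_id
            elif r < 0.9:                      # 10%: replace with a random token
                token_ids[i] = random.randrange(vocab_size)
            # remaining 10%: leave the token unchanged
        else:
            labels[i] = -100                   # unselected positions are ignored by the loss
    return token_ids, labels
```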
<br>
**Train/Val/Test splits**
After preprocessing and deduplication, we have a training set of 381,034,638 unique, mostly Thai sentences with sequence length of 5 to 300 words (78.5GB). The training set has a total of 16,957,775,412 words as tokenized by dictionary-based maximal matching [[Phatthiyaphaibun et al., 2020]](https://zenodo.org/record/4319685#.YA4xEGQzaDU), 8,680,485,067 subwords as tokenized by SentencePiece tokenizer, and 53,035,823,287 characters.
<br>
**Pretraining**
The model was trained on 8 V100 GPUs for 500,000 steps with the batch size of 4,096 (32 sequences per device with 16 accumulation steps) and a sequence length of 416 tokens. The optimizer we used is Adam with the learning rate of $3e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.999$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 24,000 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint.
As of Sun 24 Jan 2021, we release the model from the checkpoint @360,000 steps because the model pretraining has not yet been completed.
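The optimizer and learning-rate schedule above can be written down roughly as follows; this is a sketch under the assumption of a standard PyTorch/`transformers` setup, with a stand-in module in place of the actual RoBERTa model.

```python
import torch
from transformers import get_linear_schedule_with_warmup

model = torch.nn.Linear(2, 2)  # stand-in for the actual RoBERTa model

# Adam with lr = 3e-4, beta_1 = 0.9, beta_2 = 0.999, epsilon = 1e-6 (as stated above).
optimizer = torch.optim.Adam(model.parameters(), lr=3e-4, betas=(0.9, 0.999), eps=1e-6)

# Warm up for the first 24,000 steps, then decay linearly to zero over 500,000 steps.
scheduler = get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=24_000, num_training_steps=500_000
)
```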
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{"language": "th", "widget": [{"text": "\u0e1c\u0e39\u0e49\u0e43\u0e0a\u0e49\u0e07\u0e32\u0e19\u0e17\u0e48\u0e32\u0e2d\u0e32\u0e01\u0e32\u0e28\u0e22\u0e32\u0e19\u0e19\u0e32\u0e19\u0e32\u0e0a\u0e32\u0e15\u0e34<mask>\u0e21\u0e35\u0e01\u0e27\u0e48\u0e32\u0e2a\u0e32\u0e21\u0e25\u0e49\u0e32\u0e19\u0e04\u0e19<pad>"}]}
|
fill-mask
|
airesearch/wangchanberta-base-att-spm-uncased
|
[
"transformers",
"pytorch",
"safetensors",
"camembert",
"fill-mask",
"th",
"arxiv:1907.11692",
"arxiv:1801.06146",
"arxiv:1808.06226",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1907.11692",
"1801.06146",
"1808.06226",
"2101.09635"
] |
[
"th"
] |
TAGS
#transformers #pytorch #safetensors #camembert #fill-mask #th #arxiv-1907.11692 #arxiv-1801.06146 #arxiv-1808.06226 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #has_space #region-us
|
# WangchanBERTa base model: 'wangchanberta-base-att-spm-uncased'
<br>
Pretrained RoBERTa BASE model on assorted Thai texts (78.5 GB).
The script and documentation can be found at this repository.
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reviews' 
Users' review rating classification task (scale is ranging from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this Colab notebook
<br>
## Training data
'wangchanberta-base-att-spm-uncased' model was pretrained on assorted Thai text dataset. The total size of uncompressed text is 78.5GB.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace HTML forms of characters with the actual characters, such as &nbsp; with a space and <br /> with a line break [[Howard and Ruder, 2018]](URL
- Remove empty brackets ((), {}, and []) that sometimes come up as a result of text extraction such as from Wikipedia.
- Replace line breaks with spaces.
- Replace more than one space with a single space.
- Remove more than 3 repetitive characters, such as ดีมากกก to ดีมาก [[Howard and Ruder, 2018]](URL
- Word-level tokenization using [[Phatthiyaphaibun et al., 2020]](URL ’s 'newmm' dictionary-based maximal matching tokenizer.
- Replace repetitive words; this is done post-tokenization, unlike [[Howard and Ruder, 2018]](URL since there is no delimitation by space in Thai as in English.
- Replace spaces with <_>. The SentencePiece tokenizer combines the spaces with other tokens. Since spaces serve as punctuation in Thai such as sentence boundaries similar to periods in English, combining them with other tokens would omit an important feature for tasks such as word tokenization and sentence breaking. Therefore, we opt to explicitly mark spaces with <_>.
<br>
Regarding the vocabulary, we use SentencePiece [[Kudo, 2018]](URL to train SentencePiece unigram model.
The tokenizer has a vocabulary size of 25,000 subwords, trained on 15M sentences sampled from the training set.
The length of each sequence is limited up to 416 subword tokens.
Regarding the masking procedure, for each sequence, we sampled 15% of the tokens and replaced them with the <mask> token. Out of the 15%, 80% is replaced with a <mask> token, 10% is left unchanged, and 10% is replaced with a random token.
<br>
Train/Val/Test splits
After preprocessing and deduplication, we have a training set of 381,034,638 unique, mostly Thai sentences with sequence length of 5 to 300 words (78.5GB). The training set has a total of 16,957,775,412 words as tokenized by dictionary-based maximal matching [[Phatthiyaphaibun et al., 2020]](URL 8,680,485,067 subwords as tokenized by SentencePiece tokenizer, and 53,035,823,287 characters.
<br>
Pretraining
The model was trained on 8 V100 GPUs for 500,000 steps with the batch size of 4,096 (32 sequences per device with 16 accumulation steps) and a sequence length of 416 tokens. The optimizer we used is Adam with the learning rate of $3e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.999$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 24,000 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint.
As of Sun 24 Jan 2021, we release the model from the checkpoint @360,000 steps because the model pretraining has not yet been completed.
<br>
BibTeX entry and citation info
|
[
"# WangchanBERTa base model: 'wangchanberta-base-att-spm-uncased'\n\n<br>\n\nPretrained RoBERTa BASE model on assorted Thai texts (78.5 GB).\nThe script and documentation can be found at this repository.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as described in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-att-spm-uncased' model was pretrained on assorted Thai text dataset. The total size of uncompressed text is 78.5GB.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace HTML forms of characters with the actual characters such asnbsp;with a space and \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\<br /> with a line break [[Howard and Ruder, 2018]](URL\n- Remove empty brackets ((), {}, and []) than sometimes come up as a result of text extraction such as from Wikipedia.\n- Replace line breaks with spaces.\n- Replace more than one spaces with a single space\n- Remove more than 3 repetitive characters such as ดีมากกก to ดีมาก [Howard and Ruder, 2018]](URL\n- Word-level tokenization using [[Phatthiyaphaibun et al., 2020]](URL ’s 'newmm' dictionary-based maximal matching tokenizer.\n- Replace repetitive words; this is done post-tokenization unlike [[Howard and Ruder, 2018]](URL since there is no delimitation by space in Thai as in English.\n- Replace spaces with <\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_>. The SentencePiece tokenizer combines the spaces with other tokens. Since spaces serve as punctuation in Thai such as sentence boundaries similar to periods in English, combining it with other tokens will omit an important feature for tasks such as word tokenization and sentence breaking. Therefore, we opt to explicitly mark spaces with <\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_>.\n\n<br>\n\n\nRegarding the vocabulary, we use SentencePiece [[Kudo, 2018]](URL to train SentencePiece unigram model.\nThe tokenizer has a vocabulary size of 25,000 subwords, trained on 15M sentences sampled from the training set.\n\n\nThe length of each sequence is limited up to 416 subword tokens.\n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nAfter preprocessing and deduplication, we have a training set of 381,034,638 unique, mostly Thai sentences with sequence length of 5 to 300 words (78.5GB). The training set has a total of 16,957,775,412 words as tokenized by dictionary-based maximal matching [[Phatthiyaphaibun et al., 2020]](URL 8,680,485,067 subwords as tokenized by SentencePiece tokenizer, and 53,035,823,287 characters.\n<br>\n\nPretraining\n\nThe model was trained on 8 V100 GPUs for 500,000 steps with the batch size of 4,096 (32 sequences per device with 16 accumulation steps) and a sequence length of 416 tokens. The optimizer we used is Adam with the learning rate of $3e-4$, $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\beta_1 = 0.9$, $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\beta_2= 0.999$ and $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\epsilon = 1e-6$. The learning rate is warmed up for the first 24,000 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\nAs of Sun 24 Jan 2021, we release the model from the checkpoint @360,000 steps due to the model pretraining has not yet been completed\n\n<br>\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #safetensors #camembert #fill-mask #th #arxiv-1907.11692 #arxiv-1801.06146 #arxiv-1808.06226 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #has_space #region-us \n",
"# WangchanBERTa base model: 'wangchanberta-base-att-spm-uncased'\n\n<br>\n\nPretrained RoBERTa BASE model on assorted Thai texts (78.5 GB).\nThe script and documentation can be found at this repository.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as described in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-att-spm-uncased' model was pretrained on assorted Thai text dataset. The total size of uncompressed text is 78.5GB.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace HTML forms of characters with the actual characters such asnbsp;with a space and \\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\<br /> with a line break [[Howard and Ruder, 2018]](URL\n- Remove empty brackets ((), {}, and []) than sometimes come up as a result of text extraction such as from Wikipedia.\n- Replace line breaks with spaces.\n- Replace more than one spaces with a single space\n- Remove more than 3 repetitive characters such as ดีมากกก to ดีมาก [Howard and Ruder, 2018]](URL\n- Word-level tokenization using [[Phatthiyaphaibun et al., 2020]](URL ’s 'newmm' dictionary-based maximal matching tokenizer.\n- Replace repetitive words; this is done post-tokenization unlike [[Howard and Ruder, 2018]](URL since there is no delimitation by space in Thai as in English.\n- Replace spaces with <\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_>. The SentencePiece tokenizer combines the spaces with other tokens. Since spaces serve as punctuation in Thai such as sentence boundaries similar to periods in English, combining it with other tokens will omit an important feature for tasks such as word tokenization and sentence breaking. Therefore, we opt to explicitly mark spaces with <\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\_>.\n\n<br>\n\n\nRegarding the vocabulary, we use SentencePiece [[Kudo, 2018]](URL to train SentencePiece unigram model.\nThe tokenizer has a vocabulary size of 25,000 subwords, trained on 15M sentences sampled from the training set.\n\n\nThe length of each sequence is limited up to 416 subword tokens.\n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nAfter preprocessing and deduplication, we have a training set of 381,034,638 unique, mostly Thai sentences with sequence length of 5 to 300 words (78.5GB). The training set has a total of 16,957,775,412 words as tokenized by dictionary-based maximal matching [[Phatthiyaphaibun et al., 2020]](URL 8,680,485,067 subwords as tokenized by SentencePiece tokenizer, and 53,035,823,287 characters.\n<br>\n\nPretraining\n\nThe model was trained on 8 V100 GPUs for 500,000 steps with the batch size of 4,096 (32 sequences per device with 16 accumulation steps) and a sequence length of 416 tokens. The optimizer we used is Adam with the learning rate of $3e-4$, $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\beta_1 = 0.9$, $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\beta_2= 0.999$ and $\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\epsilon = 1e-6$. The learning rate is warmed up for the first 24,000 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\nAs of Sun 24 Jan 2021, we release the model from the checkpoint @360,000 steps due to the model pretraining has not yet been completed\n\n<br>\n\nBibTeX entry and citation info"
] |
[
84,
64,
35,
308,
28,
48,
791
] |
[
"passage: TAGS\n#transformers #pytorch #safetensors #camembert #fill-mask #th #arxiv-1907.11692 #arxiv-1801.06146 #arxiv-1808.06226 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #has_space #region-us \n# WangchanBERTa base model: 'wangchanberta-base-att-spm-uncased'\n\n<br>\n\nPretrained RoBERTa BASE model on assorted Thai texts (78.5 GB).\nThe script and documentation can be found at this repository.\n<br>## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as described in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.\n\n<br>",
"passage: ## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>## Training data\n\n'wangchanberta-base-att-spm-uncased' model was pretrained on assorted Thai text dataset. The total size of uncompressed text is 78.5GB."
] |
[
-0.030044732615351677,
0.015154910273849964,
-0.00017992849461734295,
0.05405587702989578,
0.11194481700658798,
-0.018401794135570526,
0.08894066512584686,
0.08212566375732422,
-0.037844013422727585,
0.07239190489053726,
0.02490588277578354,
-0.08873388171195984,
0.07263679802417755,
0.18112435936927795,
0.05375161021947861,
-0.2268577367067337,
0.04350130259990692,
-0.0007563028484582901,
-0.005496703088283539,
0.06801608949899673,
0.05859101191163063,
-0.08859046548604965,
0.0897766724228859,
0.028435466811060905,
-0.0790763720870018,
-0.02655988559126854,
-0.08635416626930237,
-0.053182777017354965,
-0.01622907631099224,
-0.015142936259508133,
0.08030463755130768,
-0.020736102014780045,
0.06338470429182053,
-0.06460940092802048,
0.016818519681692123,
0.0019933097064495087,
0.02587992139160633,
0.04305391386151314,
0.02533840760588646,
0.06733256578445435,
0.15599258244037628,
-0.07957224547863007,
0.06594385951757431,
0.036102332174777985,
-0.07463892549276352,
-0.029575351625680923,
-0.05357309803366661,
0.11876486241817474,
0.08409759402275085,
0.07668446004390717,
-0.07111997902393341,
0.08579204231500626,
-0.04207488149404526,
0.03493238240480423,
0.048596419394016266,
-0.21621860563755035,
-0.04480283334851265,
0.07546406984329224,
-0.007700627204030752,
0.060479287058115005,
-0.07241053879261017,
-0.00337683386169374,
0.042195335030555725,
0.013778646476566792,
0.031386762857437134,
-0.013208143413066864,
-0.08616980910301208,
-0.0625719428062439,
-0.09169763326644897,
0.04188986122608185,
0.2452629804611206,
0.03675985708832741,
-0.0389326848089695,
-0.1135808676481247,
-0.027673371136188507,
-0.007146687246859074,
-0.021191075444221497,
0.028338715434074402,
-0.014370245859026909,
0.00900120846927166,
0.020086901262402534,
-0.07443996518850327,
-0.08035746216773987,
0.006239862181246281,
-0.025430645793676376,
-0.0018128082156181335,
-0.002880427986383438,
0.01373338233679533,
-0.01705445908010006,
0.03704890236258507,
-0.08406943082809448,
-0.07303915172815323,
-0.013880373910069466,
-0.05498263239860535,
-0.04386672377586365,
0.0037421099841594696,
0.0048271287232637405,
-0.09864938259124756,
0.00430746516212821,
0.06126175448298454,
0.019377263262867928,
0.06857999414205551,
-0.0344049334526062,
0.025062279775738716,
0.04254448041319847,
0.1391378939151764,
-0.06947720050811768,
0.0002469504252076149,
0.009210235439240932,
0.02846122905611992,
0.03769009932875633,
-0.034052394330501556,
-0.07719114422798157,
-0.015029298141598701,
0.03254622593522072,
-0.03067319095134735,
-0.040056489408016205,
0.0802585780620575,
-0.0024694986641407013,
-0.05692760646343231,
0.20353162288665771,
-0.10231691598892212,
-0.040799330919981,
0.015207745134830475,
-0.03835226222872734,
-0.006755545735359192,
0.09097333997488022,
0.0038448814302682877,
-0.025946270674467087,
0.019586922600865364,
-0.06326227635145187,
-0.01136010605841875,
-0.046863853931427,
-0.07715506851673126,
-0.0015537431463599205,
-0.06537924706935883,
-0.0490284189581871,
-0.07682452350854874,
-0.10671646893024445,
-0.02060938999056816,
0.02337542548775673,
-0.031910140067338943,
-0.0001707654446363449,
-0.047050170600414276,
0.03503032028675079,
-0.011933788657188416,
0.025472920387983322,
0.009511047974228859,
0.015835218131542206,
0.043719831854104996,
0.01860358938574791,
0.1045396476984024,
-0.05941244214773178,
0.03807399421930313,
-0.1049138680100441,
0.023174390196800232,
-0.25454747676849365,
0.07779572159051895,
-0.031140541657805443,
0.1016746312379837,
-0.04762908071279526,
-0.016114281490445137,
-0.0885377898812294,
0.001611834391951561,
0.03934543579816818,
0.0849483385682106,
-0.08064159750938416,
-0.030686520040035248,
0.18159496784210205,
-0.12656733393669128,
-0.10878628492355347,
0.12206138670444489,
-0.008547693490982056,
0.17739762365818024,
0.0486379936337471,
0.22046780586242676,
0.04313180223107338,
-0.008375382050871849,
-0.05292024090886116,
0.03592808172106743,
0.015457559376955032,
0.036999113857746124,
0.029101639986038208,
-0.010259905830025673,
-0.044834110885858536,
0.04902277886867523,
-0.03598400577902794,
0.018752582371234894,
-0.01427281741052866,
-0.04546822980046272,
-0.01604212075471878,
-0.10011237859725952,
-0.010303810238838196,
-0.027552545070648193,
0.0707850530743599,
-0.013025203719735146,
-0.039639443159103394,
0.06127536669373512,
0.06705901771783829,
-0.07709936797618866,
-0.0009085442870855331,
-0.07066841423511505,
0.004263928160071373,
-0.022437870502471924,
-0.005560806952416897,
-0.12193455547094345,
0.011732757091522217,
0.042761851102113724,
0.01438695564866066,
0.08915847539901733,
0.006490384228527546,
0.008613884449005127,
0.0361519381403923,
-0.05371594801545143,
0.044029347598552704,
0.0373736210167408,
-0.01742265745997429,
-0.041157305240631104,
-0.039150092750787735,
0.03024451993405819,
-0.03957246616482735,
0.16662248969078064,
-0.14208351075649261,
0.05488051101565361,
-0.006638258695602417,
0.029205622151494026,
0.0248744897544384,
-0.03172028809785843,
0.11329582333564758,
0.01061059907078743,
0.026496145874261856,
-0.03185856342315674,
0.01784815825521946,
0.049855198711156845,
-0.10333488881587982,
0.022736111655831337,
-0.13592278957366943,
-0.06387340277433395,
0.11578461527824402,
0.02315540239214897,
-0.09216392040252686,
-0.03725803643465042,
-0.0037733600474894047,
-0.05711207538843155,
-0.07513844966888428,
0.01131725125014782,
0.06545981764793396,
-0.032783493399620056,
0.13107533752918243,
-0.06036469340324402,
0.012661607004702091,
-0.004426954314112663,
-0.02127993293106556,
0.022611238062381744,
0.054431892931461334,
0.05577295273542404,
-0.210978701710701,
0.05164004862308502,
0.04497682675719261,
-0.04496874660253525,
0.12410677969455719,
-0.005634218454360962,
-0.03595925495028496,
0.0025410852394998074,
0.03969937562942505,
0.014406570233404636,
0.004767094738781452,
-0.14877653121948242,
-0.048005323857069016,
0.023034997284412384,
-0.003762694075703621,
0.04043664410710335,
-0.04419402778148651,
-0.0019619371742010117,
-0.007481830660253763,
-0.010587197728455067,
-0.05124998092651367,
0.0237565990537405,
-0.03516102209687233,
0.03240921348333359,
0.04736378788948059,
0.1148582249879837,
-0.00753164105117321,
0.013892078772187233,
-0.10779762268066406,
0.09901055693626404,
-0.06481973826885223,
-0.2885730266571045,
-0.08435997366905212,
-0.010380953550338745,
0.01674686372280121,
0.023867568001151085,
0.002685905434191227,
-0.10207152366638184,
-0.059647250920534134,
-0.082008495926857,
0.04756850749254227,
-0.013303475454449654,
-0.005112878978252411,
0.033753652125597,
0.031627144664525986,
0.032144345343112946,
-0.06486032158136368,
0.020052015781402588,
-0.013048302382230759,
-0.06670582294464111,
0.03838934004306793,
-0.12860941886901855,
0.045744411647319794,
0.09505556523799896,
-0.048805467784404755,
0.010608342476189137,
0.005177091807126999,
0.14396290481090546,
-0.06887418031692505,
0.08716325461864471,
0.05248980596661568,
-0.04284938424825668,
0.02756580151617527,
0.10915835201740265,
0.010160230100154877,
-0.029366403818130493,
0.08582152426242828,
0.015818173065781593,
-0.06440696120262146,
-0.19620972871780396,
-0.05059719830751419,
-0.05842658504843712,
0.06597969681024551,
0.08879665285348892,
0.034434184432029724,
0.04327429085969925,
0.0870891660451889,
-0.02429916523396969,
0.06838265061378479,
0.022642681375145912,
0.054171331226825714,
0.0449582077562809,
0.013894504867494106,
0.07657937705516815,
-0.08413535356521606,
-0.0029216306284070015,
0.056913360953330994,
-0.037126801908016205,
0.14854252338409424,
-0.01441890187561512,
0.07973901927471161,
0.032910801470279694,
0.051479876041412354,
0.07175879180431366,
0.08327822387218475,
-0.09571453928947449,
-0.009316058829426765,
-0.02627759799361229,
-0.06694165617227554,
-0.020948376506567,
0.03991520404815674,
-0.011796178296208382,
-0.019132081419229507,
-0.013083122670650482,
0.05901119112968445,
0.03235281631350517,
0.11119046807289124,
0.10392387211322784,
-0.056919727474451065,
-0.07515427470207214,
0.012991786934435368,
0.030293963849544525,
-0.07997418940067291,
0.03200444579124451,
0.07479392737150192,
-0.07646751403808594,
0.06525488197803497,
0.020030464977025986,
0.10333357751369476,
-0.07558837532997131,
-0.004849132616072893,
-0.022034283727407455,
-0.010337287560105324,
-0.01853286102414131,
0.07795523852109909,
-0.10122539103031158,
0.12114012241363525,
0.024919278919696808,
0.014904354698956013,
-0.019411083310842514,
-0.013383112847805023,
0.017430931329727173,
0.039705563336610794,
0.07375826686620712,
0.002886060159653425,
-0.003040062263607979,
-0.0433824360370636,
-0.12152865529060364,
0.007295601069927216,
0.045032795518636703,
-0.007062878459692001,
0.015076451934874058,
-0.0006458284333348274,
0.006517630070447922,
0.008393758907914162,
-0.049516357481479645,
-0.14755740761756897,
-0.14951831102371216,
-0.016395771875977516,
-0.031157221645116806,
-0.031027790158987045,
0.0011405604891479015,
-0.04651816934347153,
-0.021406160667538643,
0.18489794433116913,
0.016227804124355316,
-0.06901580095291138,
-0.04411941021680832,
0.05038321390748024,
0.07239538431167603,
-0.03383069112896919,
0.04500877857208252,
-0.042690709233284,
0.03403688222169876,
-0.018093539401888847,
-0.018176265060901642,
0.06976255774497986,
-0.023121599107980728,
-0.05176931619644165,
0.014775548130273819,
0.021600957959890366,
0.10108757019042969,
0.01893562823534012,
0.0013293987140059471,
0.011049633845686913,
0.01642509736120701,
-0.09255009889602661,
-0.031690239906311035,
0.0800589919090271,
0.008320883847773075,
0.08491133898496628,
-0.07201048731803894,
-0.10667692869901657,
-0.07034239917993546,
-0.06036767363548279,
0.043816838413476944,
0.043659910559654236,
-0.03346303105354309,
0.09332043677568436,
0.19793792068958282,
-0.03229266405105591,
-0.18062525987625122,
-0.023056652396917343,
0.03296884149312973,
0.02767665684223175,
-0.001268584281206131,
-0.15702947974205017,
0.12365524470806122,
0.02679213508963585,
-0.0006379410624504089,
-0.008327294141054153,
-0.15081003308296204,
-0.09831357002258301,
0.08188219368457794,
-0.009605555795133114,
0.051243722438812256,
-0.11998757719993591,
-0.05558817461133003,
0.004063671454787254,
-0.07181832194328308,
0.06744774430990219,
-0.0954873189330101,
0.0799577534198761,
0.017643224447965622,
-0.0025396980345249176,
0.022410519421100616,
-0.017526797950267792,
0.1130949929356575,
-0.03159722685813904,
-0.03578237444162369,
-0.08959129452705383,
-0.03517638146877289,
0.13780412077903748,
-0.02029457688331604,
0.17991434037685394,
-0.017038140445947647,
0.02825000323355198,
-0.09959723055362701,
-0.06097017973661423,
-0.025108814239501953,
-0.016626548022031784,
-0.030052822083234787,
-0.05271465331315994,
-0.062208399176597595,
0.056753866374492645,
0.0041786557994782925,
0.016860481351614,
-0.0030422527343034744,
-0.044570114463567734,
0.013155180960893631,
0.07775378227233887,
0.07330571860074997,
0.040307555347681046,
0.06149936467409134,
-0.013316860422492027,
-0.04819680377840996,
0.05474599450826645,
-0.18872958421707153,
0.006598535925149918,
0.03195321559906006,
0.02921842597424984,
0.08268767595291138,
0.003179325722157955,
-0.07498744875192642,
0.03638032451272011,
0.0768103301525116,
-0.09153245389461517,
-0.049088004976511,
-0.01255841925740242,
0.05444609373807907,
-0.10229244828224182,
-0.03427423909306526,
0.03638116270303726,
-0.05955922231078148,
-0.013279035687446594,
0.026988156139850616,
0.022804241627454758,
-0.050301387906074524,
0.05307629704475403,
0.09140362590551376,
0.030282963067293167,
-0.052681777626276016,
0.06861035525798798,
0.16736602783203125,
-0.036822933703660965,
0.015990393236279488,
0.12057244032621384,
-0.12245878577232361,
-0.03062579780817032,
-0.01624218374490738,
0.10980016738176346,
0.04903664439916611,
-0.045996829867362976,
0.007558163721114397,
-0.04520894214510918,
0.006335826590657234,
0.05095764249563217,
0.056818872690200806,
0.00013207457959651947,
-0.017213642597198486,
0.03030824474990368,
-0.08435087651014328,
0.06245993450284004,
0.07065068185329437,
0.04601838439702988,
-0.06892814487218857,
-0.0007772929966449738,
0.08658669143915176,
0.060149140655994415,
-0.026754826307296753,
-0.046910472214221954,
-0.09946532547473907,
-0.013575396500527859,
-0.04799149930477142,
0.06994061917066574,
-0.0761859193444252,
-0.023121733218431473,
-0.03819764405488968,
-0.029593801125884056,
-0.01714385114610195,
-0.03446090221405029,
-0.00696165207773447,
0.025686213746666908,
-0.04446645826101303,
0.03148113191127777,
-0.07986298948526382,
-0.010976631194353104,
0.0653015524148941,
-0.06190136820077896,
0.06624521315097809,
0.01068582758307457,
-0.03010428696870804,
0.005176262930035591,
-0.05961019545793533,
-0.009233110584318638,
0.01559397391974926,
0.01889568381011486,
0.004796421155333519,
-0.050720974802970886,
-0.028485199436545372,
-0.04892204329371452,
0.0495828241109848,
0.03183816000819206,
0.13856099545955658,
-0.06699544191360474,
0.056741416454315186,
-0.06077663600444794,
-0.058528635650873184,
-0.058686427772045135,
0.06530526280403137,
0.04751729965209961,
0.045638058334589005,
0.0011143721640110016,
-0.06834134459495544,
0.028673294931650162,
-0.09643092751502991,
0.004544766154140234,
-0.00549466535449028,
-0.021102242171764374,
-0.01716539077460766,
-0.04510476440191269,
0.035916246473789215,
0.009479643777012825,
0.1432306170463562,
-0.006316961254924536,
-0.0338619090616703,
0.02714000642299652,
-0.01647133380174637,
-0.06875301897525787,
0.013274330645799637,
0.20819415152072906,
0.015584343113005161,
-0.012184026651084423,
0.0219729021191597,
0.025630943477153778,
-0.0447162389755249,
0.07690602540969849,
0.08094830811023712,
0.07038769125938416,
-0.07810939103364944,
0.007501734420657158,
-0.021337339654564857,
0.020092584192752838,
-0.10337813198566437,
0.0926918238401413,
-0.0656614676117897,
0.007130051963031292,
-0.08767926692962646,
0.00825035572052002,
0.13996219635009766,
-0.06377670913934708,
0.031272612512111664,
-0.007686206139624119,
-0.11014080792665482,
-0.06214175373315811,
-0.12308432906866074,
-0.08156060427427292,
-0.04006710648536682,
-0.017889803275465965,
-0.06081507354974747,
-0.041470903903245926,
0.06611819565296173,
0.0452476367354393,
-0.011428803205490112,
0.19136746227741241,
-0.024846479296684265,
-0.04055364802479744,
0.0464133657515049,
-0.017904793843626976,
-0.03537656366825104,
-0.04013637453317642,
0.007673825137317181,
-0.001801283098757267,
0.11206193268299103,
0.0433269627392292,
0.044493936002254486,
-0.003191692754626274,
0.03697158396244049,
0.000819917768239975,
-0.05410609021782875,
-0.03630826249718666,
0.013158408924937248,
0.04801098257303238,
0.13818421959877014,
0.026562880724668503,
-0.03740406781435013,
-0.030762571841478348,
0.15542343258857727,
-0.03133358433842659,
-0.09557486325502396,
-0.15063220262527466,
0.0685679167509079,
-0.03624022752046585,
-0.04246368259191513,
0.01384371891617775,
-0.08735489845275879,
-0.0360456258058548,
0.2533021569252014,
0.19200755655765533,
0.008760622702538967,
0.014276520349085331,
0.017736531794071198,
0.001566929742693901,
0.009242042899131775,
0.09760121256113052,
0.05960272252559662,
0.1278037428855896,
-0.05048166587948799,
-0.019851502031087875,
-0.08431253582239151,
-0.05945322662591934,
-0.0913696140050888,
-0.06754384189844131,
0.06961016356945038,
0.020190617069602013,
-0.08370596170425415,
0.1043941006064415,
-0.06657150387763977,
-0.1631692498922348,
-0.04876666143536568,
-0.06963635981082916,
-0.08853882551193237,
-0.009476298466324806,
-0.01357725914567709,
0.07673479616641998,
0.007027881219983101,
-0.02829417586326599,
0.05450760945677757,
0.06749732792377472,
0.0351407565176487,
-0.05260245501995087,
-0.02368186227977276,
0.07087988406419754,
-0.041084978729486465,
0.09131921082735062,
0.02056177332997322,
0.05110364779829979,
0.03143629431724548,
0.01238247100263834,
-0.0500449612736702,
0.05580812692642212,
-0.004341489635407925,
0.11292432993650436,
-0.019172310829162598,
0.11995936185121536,
-0.025041257962584496,
0.01964252069592476,
0.028899403288960457,
-0.11090876907110214,
0.023289881646633148,
0.030272236093878746,
0.044186148792505264,
-0.12079909443855286,
0.10035699605941772,
-0.09210574626922607,
0.12334854900836945,
0.17526647448539734,
-0.023524565622210503,
-0.033465903252363205,
-0.010957643389701843,
0.016345465555787086,
-0.012414764612913132,
-0.05132032558321953,
-0.04723425954580307,
-0.09231962263584137,
-0.025163613259792328,
-0.029362156987190247,
0.05012267455458641,
-0.30041027069091797,
-0.00047110035666264594,
-0.04713437333703041,
-0.023396039381623268,
-0.03670284152030945,
0.09343869984149933,
0.008829738944768906,
0.01741030253469944,
-0.03847932070493698,
-0.05397272855043411,
0.0055156610906124115,
0.04570532590150833,
-0.12135788798332214,
-0.08325205743312836
] |
null | null |
transformers
|
# wangchanberta-base-wiki-20210520-spm-finetune-qa
This model finetunes `airesearchth/wangchanberta-base-wiki-20210520-spmd` on the training sets of `iapp_wiki_qa_squad`, `thaiqa_squad`, and `nsc_qa` (examples whose cosine similarity with validation and test examples exceeds 0.8 were removed; contexts of the latter two are trimmed to around 300 `newmm` words). Benchmarks are shared on [wandb](https://wandb.ai/cstorm125/wangchanberta-qa) using the validation and test sets of `iapp_wiki_qa_squad`.
Trained with [thai2transformers](https://github.com/vistec-AI/thai2transformers/blob/dev/scripts/downstream/train_question_answering_lm_finetuning.py).
Run with:
```
export MODEL_NAME=airesearchth/wangchanberta-base-wiki-20210520-news-spm
CUDA_LAUNCH_BLOCKING=1 python train_question_answering_lm_finetuning.py \
  --model_name $MODEL_NAME \
  --dataset_name chimera_qa \
  --output_dir $MODEL_NAME-finetune-chimera_qa-model \
  --log_dir $MODEL_NAME-finetune-chimera_qa-log \
  --model_max_length 400 \
  --pad_on_right \
  --fp16
```
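For inference, a minimal sketch with the Hugging Face `question-answering` pipeline might look like the following. This is an assumption-laden sketch rather than the official evaluation code: the model id is simply this repository's name, and the Thai question/context pair is shortened from the widget example and is purely illustrative.
```python
from transformers import pipeline

# Load the finetuned extractive-QA checkpoint (this repository's model id).
qa = pipeline(
    "question-answering",
    model="airesearch/wangchanberta-base-wiki-20210520-spm-finetune-qa",
    tokenizer="airesearch/wangchanberta-base-wiki-20210520-spm-finetune-qa",
)

# Illustrative Thai question/context pair; replace with your own data.
result = qa(
    question="สวนกุหลาบเป็นโรงเรียนอะไร",
    context="โรงเรียนสวนกุหลาบวิทยาลัย (Suankularb Wittayalai School) เป็นโรงเรียนชายล้วน ระดับชั้นมัธยมศึกษาขนาดใหญ่พิเศษ",
)
print(result["answer"], result["score"])
```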
|
{"language": "th", "widget": [{"text": "\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2d\u0e30\u0e44\u0e23", "context": "\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e27\u0e34\u0e17\u0e22\u0e32\u0e25\u0e31\u0e22 (Suankularb Wittayalai School) (\u0e2d\u0e31\u0e01\u0e29\u0e23\u0e22\u0e48\u0e2d : \u0e2a.\u0e01. / S.K.) \u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e0a\u0e32\u0e22\u0e25\u0e49\u0e27\u0e19 \u0e23\u0e30\u0e14\u0e31\u0e1a\u0e0a\u0e31\u0e49\u0e19\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e19\u0e32\u0e14\u0e43\u0e2b\u0e0d\u0e48\u0e1e\u0e34\u0e40\u0e28\u0e29 \u0e2a\u0e31\u0e07\u0e01\u0e31\u0e14\u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e40\u0e02\u0e15\u0e1e\u0e37\u0e49\u0e19\u0e17\u0e35\u0e48\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e40\u0e02\u0e15 1 \u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e04\u0e13\u0e30\u0e01\u0e23\u0e23\u0e21\u0e01\u0e32\u0e23\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e31\u0e49\u0e19\u0e1e\u0e37\u0e49\u0e19\u0e10\u0e32\u0e19 (\u0e0a\u0e37\u0e48\u0e2d\u0e40\u0e14\u0e34\u0e21: \u0e01\u0e23\u0e21\u0e2a\u0e32\u0e21\u0e31\u0e0d\u0e28\u0e36\u0e01\u0e29\u0e32) \u0e01\u0e23\u0e30\u0e17\u0e23\u0e27\u0e07\u0e28\u0e36\u0e01\u0e29\u0e32\u0e18\u0e34\u0e01\u0e32\u0e23 \u0e01\u0e48\u0e2d\u0e15\u0e31\u0e49\u0e07\u0e42\u0e14\u0e22 \u0e1e\u0e23\u0e30\u0e1a\u0e32\u0e17\u0e2a\u0e21\u0e40\u0e14\u0e47\u0e08\u0e1e\u0e23\u0e30\u0e08\u0e38\u0e25\u0e08\u0e2d\u0e21\u0e40\u0e01\u0e25\u0e49\u0e32\u0e40\u0e08\u0e49\u0e32\u0e2d\u0e22\u0e39\u0e48\u0e2b\u0e31\u0e27 \u0e44\u0e14\u0e49\u0e23\u0e31\u0e1a\u0e01\u0e32\u0e23\u0e2a\u0e16\u0e32\u0e1b\u0e19\u0e32\u0e02\u0e36\u0e49\u0e19\u0e43\u0e19\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 8 \u0e21\u0e35\u0e19\u0e32\u0e04\u0e21 \u0e1e.\u0e28. 2424 (\u0e02\u0e13\u0e30\u0e19\u0e31\u0e49\u0e19\u0e19\u0e31\u0e1a\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 1 \u0e40\u0e21\u0e29\u0e32\u0e22\u0e19 \u0e40\u0e1b\u0e47\u0e19\u0e27\u0e31\u0e19\u0e02\u0e36\u0e49\u0e19\u0e1b\u0e35\u0e43\u0e2b\u0e21\u0e48 \u0e40\u0e21\u0e37\u0e48\u0e2d\u0e19\u0e31\u0e1a\u0e2d\u0e22\u0e48\u0e32\u0e07\u0e2a\u0e32\u0e01\u0e25\u0e16\u0e37\u0e2d\u0e40\u0e1b\u0e47\u0e19 \u0e1e.\u0e28. 2425) \u0e42\u0e14\u0e22\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e23\u0e31\u0e10\u0e1a\u0e32\u0e25\u0e41\u0e2b\u0e48\u0e07\u0e41\u0e23\u0e01\u0e02\u0e2d\u0e07\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28\u0e44\u0e17\u0e22"}]}
|
question-answering
|
airesearch/wangchanberta-base-wiki-20210520-spm-finetune-qa
|
[
"transformers",
"pytorch",
"camembert",
"question-answering",
"th",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"th"
] |
TAGS
#transformers #pytorch #camembert #question-answering #th #endpoints_compatible #region-us
|
# wangchanberta-base-wiki-20210520-spm-finetune-qa
Finetuning 'airesearchth/wangchanberta-base-wiki-20210520-spmd' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.
Trained with thai2transformers.
Run with:
|
[
"# wangchanberta-base-wiki-20210520-spm-finetune-qa\n\nFinetuning 'airesearchth/wangchanberta-base-wiki-20210520-spmd' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
"TAGS\n#transformers #pytorch #camembert #question-answering #th #endpoints_compatible #region-us \n",
"# wangchanberta-base-wiki-20210520-spm-finetune-qa\n\nFinetuning 'airesearchth/wangchanberta-base-wiki-20210520-spmd' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
33,
155
] |
[
"passage: TAGS\n#transformers #pytorch #camembert #question-answering #th #endpoints_compatible #region-us \n# wangchanberta-base-wiki-20210520-spm-finetune-qa\n\nFinetuning 'airesearchth/wangchanberta-base-wiki-20210520-spmd' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nRun with:"
] |
[
-0.0715736523270607,
-0.0977410078048706,
-0.0035827960819005966,
0.06710181385278702,
0.040639474987983704,
-0.003910433035343885,
0.06296094506978989,
0.0976727306842804,
0.012417140416800976,
0.022776832804083824,
0.0660126656293869,
0.015025228261947632,
0.07901310920715332,
0.06465069204568863,
-0.08085986971855164,
-0.18021763861179352,
0.06987514346837997,
0.09931261092424393,
-0.11482619494199753,
0.15438872575759888,
0.10612523555755615,
-0.0598316490650177,
0.08186923712491989,
0.07457754760980606,
-0.12735283374786377,
-0.01994602382183075,
0.023575440049171448,
-0.09989392757415771,
0.13659881055355072,
-0.0029966169968247414,
0.17907898128032684,
0.06926203519105911,
-0.048423681408166885,
-0.2050129622220993,
0.05180621147155762,
-0.017679398879408836,
0.06876343488693237,
0.005027709994465113,
-0.0061941021122038364,
0.08955299109220505,
-0.025146977975964546,
0.050402089953422546,
0.0386003740131855,
0.003461143234744668,
-0.09555382281541824,
0.0005449150339700282,
-0.046480994671583176,
0.03167391195893288,
0.10931693762540817,
0.0510873980820179,
-0.02015320397913456,
0.21283744275569916,
-0.19880181550979614,
0.04155144840478897,
0.10256574302911758,
-0.3938882052898407,
-0.024759285151958466,
0.04422488436102867,
0.008484371937811375,
0.03410922363400459,
-0.06091269850730896,
-0.009197939187288284,
0.03605743497610092,
0.040781617164611816,
-0.038136500865221024,
-0.06852629035711288,
-0.07495097070932388,
-0.019072001799941063,
-0.1045461893081665,
0.011601711623370647,
0.19816531240940094,
0.05657084658741951,
-0.06214052438735962,
-0.06701697409152985,
-0.03094162605702877,
0.028027748689055443,
0.0049143568612635136,
-0.12145061045885086,
-0.04884342849254608,
0.0031760402489453554,
-0.019185034558176994,
0.014687909744679928,
-0.1115182563662529,
-0.07091408222913742,
-0.11939168721437454,
0.07246258109807968,
0.07924854010343552,
0.034553609788417816,
-0.10995018482208252,
0.0027702355291694403,
-0.048991549760103226,
-0.10739127546548843,
-0.07348326593637466,
-0.06292596459388733,
-0.027306726202368736,
-0.005221404135227203,
-0.07985616475343704,
-0.030259093269705772,
0.1148715391755104,
0.06521420925855637,
-0.10718554258346558,
0.04470665752887726,
-0.060123294591903687,
0.052232950925827026,
-0.06843124330043793,
0.1642298400402069,
0.024086369201540947,
0.1146235466003418,
0.02675073780119419,
0.03996722027659416,
-0.05353543162345886,
0.022249290719628334,
0.014838531613349915,
-0.07423758506774902,
0.04924831911921501,
0.02781049720942974,
-0.10624375194311142,
0.06939435750246048,
-0.06011691689491272,
-0.03261758014559746,
0.12097197771072388,
-0.10576079040765762,
-0.09093054383993149,
0.013836872763931751,
-0.02883751504123211,
0.04388609528541565,
0.025407209992408752,
0.09066624194383621,
-0.04062250256538391,
0.044804785400629044,
-0.009010826237499714,
0.006088243331760168,
0.0449068583548069,
-0.02340986579656601,
0.014705074019730091,
-0.07475605607032776,
-0.0037384843453764915,
-0.15413059294223785,
-0.09591826796531677,
0.007338449824601412,
0.005459323059767485,
0.05807412043213844,
-0.026352765038609505,
-0.024621406570076942,
0.009919267147779465,
-0.08260350674390793,
-0.0017235957784578204,
-0.014163915067911148,
-0.024869034066796303,
0.09878508001565933,
0.049768999218940735,
0.11769723892211914,
-0.09736206382513046,
0.04004998877644539,
-0.1512468010187149,
0.0595998652279377,
-0.12246695905923843,
0.03416379913687706,
-0.05524478480219841,
-0.029580725356936455,
-0.061900120228528976,
-0.03940728306770325,
-0.09710929542779922,
0.004447388928383589,
0.0045260596089065075,
0.1137532964348793,
-0.1056450605392456,
-0.06779098510742188,
0.2213650941848755,
-0.13547581434249878,
-0.1878652125597,
0.1992047280073166,
-0.035576231777668,
0.07505500316619873,
0.05998517572879791,
0.27223166823387146,
0.021888980641961098,
-0.12933743000030518,
0.05353764817118645,
-0.03166981413960457,
-0.0011443309485912323,
0.021242482587695122,
0.06749539077281952,
0.016776192933321,
-0.036866042762994766,
0.07919233292341232,
-0.11177358776330948,
0.050878483802080154,
-0.05389878526329994,
-0.1021924838423729,
-0.06960244476795197,
-0.12731511890888214,
0.08936313539743423,
-0.0415763221681118,
0.12774302065372467,
-0.05657735466957092,
-0.01324713695794344,
-0.0483006052672863,
0.0749804899096489,
-0.0038306303322315216,
0.003983290866017342,
-0.14406298100948334,
0.07523708790540695,
-0.04636271297931671,
-0.026400962844491005,
-0.10450306534767151,
-0.01218921598047018,
0.05473976209759712,
-0.01530515868216753,
0.05926840379834175,
0.06859961897134781,
0.00584167055785656,
-0.017856081947684288,
-0.05558260157704353,
0.03579973801970482,
-0.016719747334718704,
-0.01110211294144392,
-0.04830767214298248,
-0.006831180304288864,
0.06463689357042313,
-0.01959335245192051,
0.11084765195846558,
-0.12035656720399857,
0.03037978895008564,
0.014699348248541355,
0.12661543488502502,
0.020521117374300957,
0.03181028738617897,
0.06608317047357559,
0.08978112787008286,
0.03653695806860924,
0.0313122533261776,
0.048677314072847366,
0.010244585573673248,
-0.13631518185138702,
0.07897305488586426,
-0.0010486483806744218,
0.16054807603359222,
0.14947301149368286,
-0.0713779404759407,
0.010930410586297512,
-0.033804088830947876,
-0.04941980540752411,
-0.03248920291662216,
-0.08153894543647766,
0.09516990929841995,
0.16238518059253693,
0.011288606561720371,
0.1467551738023758,
-0.11361084133386612,
-0.0079275444149971,
-0.01542194839566946,
-0.03242611512541771,
0.006890787277370691,
0.04965367913246155,
0.0665031149983406,
-0.17451262474060059,
0.05432812497019768,
0.21174734830856323,
-0.056072309613227844,
0.1325615495443344,
-0.09157568216323853,
-0.05477141961455345,
0.017855415120720863,
0.13424108922481537,
-0.042535025626420975,
0.04948028549551964,
-0.10484208911657333,
0.016147315502166748,
0.06343771517276764,
0.04077443107962608,
0.038804423063993454,
-0.16054068505764008,
-0.06676512956619263,
0.008145569823682308,
-0.05912557616829872,
-0.11088835448026657,
0.09528643637895584,
0.047470200806856155,
0.07326919585466385,
0.027122491970658302,
0.014090067707002163,
0.02021150477230549,
0.01561419665813446,
-0.12091630697250366,
0.14343811571598053,
-0.040242139250040054,
-0.31087347865104675,
-0.040114518254995346,
0.1304459422826767,
-0.07663485407829285,
-0.057526785880327225,
0.036919932812452316,
-0.18016476929187775,
0.028881123289465904,
0.01860547624528408,
0.026694118976593018,
0.007217903155833483,
0.01230013370513916,
0.041046250611543655,
0.020937765017151833,
0.03161781653761864,
-0.07568281888961792,
-0.03829266503453255,
-0.0727071762084961,
-0.06287357956171036,
0.09774556010961533,
-0.12851756811141968,
0.0419919528067112,
0.03678474202752113,
-0.08596941828727722,
0.03204040229320526,
-0.010366546921432018,
0.25744369626045227,
-0.06739994883537292,
0.013756058178842068,
0.12811793386936188,
-0.12871359288692474,
0.032182056456804276,
0.09670513868331909,
-0.02423427253961563,
-0.09714954346418381,
0.04905938729643822,
0.0254307072609663,
-0.030388014391064644,
-0.243521586060524,
-0.024112628772854805,
-0.02155674807727337,
-0.0039301239885389805,
-0.04486757516860962,
0.031008871272206306,
0.011284802109003067,
0.11452249437570572,
0.012925419956445694,
-0.07258948683738708,
-0.09348445385694504,
0.008462388999760151,
0.12944617867469788,
0.049441128969192505,
0.11697498708963394,
-0.042084384709596634,
-0.03746713325381279,
0.021741213276982307,
0.1464739292860031,
0.12761425971984863,
0.033153433352708817,
-0.026472248136997223,
0.10460453480482101,
0.28963086009025574,
0.1798495650291443,
0.04026554897427559,
-0.06970071792602539,
-0.06300176680088043,
0.002948533045127988,
-0.020373253151774406,
-0.10805464535951614,
0.05781983211636543,
0.0034022743348032236,
-0.008632379584014416,
-0.020806336775422096,
0.08774258941411972,
0.05290525034070015,
0.23478226363658905,
0.017090165987610817,
-0.07901399582624435,
-0.1107240542769432,
-0.0012197935720905662,
-0.1088908389210701,
0.004654363263398409,
0.08342207223176956,
0.0643049031496048,
-0.15831047296524048,
0.01915019005537033,
0.005222888197749853,
0.12621918320655823,
-0.05751118063926697,
0.05058259144425392,
-0.0898856595158577,
-0.13503916561603546,
0.02238467149436474,
0.02575111947953701,
-0.3113175332546234,
0.20817482471466064,
0.0064862859435379505,
0.020993730053305626,
-0.042863693088293076,
-0.04030705988407135,
0.003111787373200059,
0.03974275663495064,
0.07717541605234146,
-0.003282577032223344,
-0.03126072883605957,
-0.12515860795974731,
-0.01161706168204546,
0.11812210828065872,
0.08576887845993042,
0.06093796715140343,
0.02633567340672016,
0.022871604189276695,
0.06200337037444115,
0.004398948047310114,
0.05879955366253853,
-0.19200044870376587,
-0.01789279468357563,
0.025004269555211067,
0.010804480873048306,
0.022018035873770714,
-0.05419401824474335,
-0.03662684187293053,
-0.20066320896148682,
0.14811602234840393,
-0.08605217933654785,
-0.09605297446250916,
-0.06269532442092896,
0.03827758878469467,
0.04842846468091011,
-0.07826558500528336,
0.027835572138428688,
-0.04044541344046593,
0.004339380655437708,
-0.06343737244606018,
-0.026410408318042755,
0.061462294310331345,
-0.03422195464372635,
-0.09281203895807266,
0.020074892789125443,
0.10460448265075684,
0.001277776900678873,
0.0732894167304039,
0.08349285274744034,
-0.02100544236600399,
0.030304545536637306,
-0.07884525507688522,
-0.022936565801501274,
-0.04040737450122833,
-0.06935156136751175,
-0.0039058716502040625,
-0.10927204042673111,
0.05892329290509224,
-0.13894234597682953,
-0.11365500837564468,
0.19749034941196442,
0.14713749289512634,
-0.046945225447416306,
0.08846622705459595,
0.1429641842842102,
0.01880418136715889,
-0.11291098594665527,
-0.0021838729735463858,
0.029538489878177643,
0.05557669326663017,
-0.06798525899648666,
-0.06785529106855392,
0.1455763727426529,
0.10032233595848083,
-0.024156689643859863,
-0.023840023204684258,
-0.10123532265424728,
-0.1085188090801239,
0.10048103332519531,
0.05511922016739845,
0.21919561922550201,
-0.11923811584711075,
-0.04066343605518341,
0.038332536816596985,
-0.23120826482772827,
0.04778203368186951,
-0.049203138798475266,
0.0662914365530014,
-0.037694428116083145,
0.08202414959669113,
-0.0005237977020442486,
-0.014065221883356571,
0.14376957714557648,
0.031927645206451416,
0.006445467472076416,
-0.034613292664289474,
-0.0038336655125021935,
0.003202318912371993,
0.04863874241709709,
0.0998888611793518,
-0.0014678422594442964,
0.061993926763534546,
-0.1526104360818863,
-0.03261321038007736,
-0.09940966218709946,
-0.014711935073137283,
-0.026163173839449883,
-0.06057661399245262,
-0.03945241868495941,
0.05061275139451027,
0.0012502131285145879,
0.02337651140987873,
-0.024118414148688316,
-0.07728741317987442,
0.10819213837385178,
0.06187883019447327,
0.08931133896112442,
-0.06233946606516838,
0.04796283319592476,
-0.04409724473953247,
-0.05101615563035011,
0.11815377324819565,
-0.18915104866027832,
0.03872131183743477,
0.11769015341997147,
0.006980400066822767,
0.08879866451025009,
0.014964663423597813,
-0.01014583557844162,
0.07907404005527496,
0.053006961941719055,
-0.08559691905975342,
-0.12640531361103058,
0.02558060549199581,
-0.09112877398729324,
-0.001691337674856186,
0.008982441388070583,
0.0998220220208168,
-0.03761420026421547,
-0.043859660625457764,
-0.015391036868095398,
-0.036648526787757874,
-0.08925368636846542,
0.09857890754938126,
0.09160379320383072,
0.048052240163087845,
-0.09524700045585632,
0.06632665544748306,
0.03104063682258129,
-0.1530231535434723,
0.013731279410421848,
0.017190715298056602,
-0.17883725464344025,
-0.0507524199783802,
-0.06989223510026932,
0.06694023311138153,
-0.019531307741999626,
-0.06576523929834366,
-0.09799332171678543,
-0.13904090225696564,
0.03289886936545372,
0.15906240046024323,
0.07283056527376175,
0.022503027692437172,
0.03528856486082077,
-0.007972788996994495,
-0.006299839820712805,
0.007566943299025297,
0.073096364736557,
0.059503138065338135,
-0.11704995483160019,
0.004054876044392586,
0.007744785863906145,
0.16511009633541107,
-0.030335642397403717,
-0.022093961015343666,
-0.08964815735816956,
0.06710029393434525,
-0.19891464710235596,
0.06215665116906166,
-0.043497487902641296,
-0.014203890226781368,
-0.010491564869880676,
-0.1684778779745102,
-0.04797595739364624,
0.02201368473470211,
-0.08136940747499466,
0.032361164689064026,
-0.034555111080408096,
0.0004363381303846836,
-0.049237608909606934,
-0.012610118836164474,
0.14497590065002441,
-0.0587642602622509,
0.08844316005706787,
0.052629027515649796,
-0.08617061376571655,
0.1370999813079834,
-0.06949877738952637,
-0.05145331099629402,
0.03664497658610344,
0.04885721579194069,
0.03236450254917145,
-0.008956707082688808,
0.04960717633366585,
0.041396740823984146,
0.07136198878288269,
0.06455361843109131,
0.24213163554668427,
-0.1007530614733696,
-0.10527366399765015,
-0.02846607007086277,
-0.054709479212760925,
-0.06301024556159973,
-0.06310723721981049,
0.1735820770263672,
0.03994062915444374,
0.10662894695997238,
-0.0631466805934906,
0.053001519292593,
-0.12425713986158371,
-0.009400746785104275,
0.011389310471713543,
-0.09670406579971313,
0.017297791317105293,
-0.06541994214057922,
0.017027998343110085,
-0.04698193445801735,
0.1617594212293625,
-0.14494366943836212,
0.010224924422800541,
0.03889811784029007,
-0.032670799642801285,
-0.04802624508738518,
0.03040320985019207,
0.2781517505645752,
0.13196556270122528,
-0.018888313323259354,
-0.05998387932777405,
-0.0478176511824131,
-0.03755880147218704,
0.010345703922212124,
0.04431160166859627,
0.3017730116844177,
-0.05285124480724335,
0.06775142252445221,
0.0746287852525711,
0.1182495579123497,
-0.015121323056519032,
-0.019751902669668198,
-0.1069415807723999,
-0.00481545552611351,
0.03803315386176109,
-0.019430553540587425,
0.2567386329174042,
-0.1172102689743042,
0.039759811013936996,
-0.05565261468291283,
-0.07710320502519608,
-0.11849195510149002,
-0.01608654297888279,
-0.12463591247797012,
-0.09139829874038696,
0.07804112881422043,
-0.06504127383232117,
-0.062123220413923264,
0.15006804466247559,
0.07910589873790741,
-0.01340857520699501,
0.20826582610607147,
0.062340233474969864,
-0.05767202749848366,
0.0817180946469307,
-0.05035431310534477,
-0.029042145237326622,
0.028630055487155914,
-0.011966484598815441,
0.04874451830983162,
0.007332766894251108,
0.008229056373238564,
-0.007558353245258331,
-0.058233071118593216,
-0.05142222344875336,
-0.10803807526826859,
-0.03694989159703255,
-0.013651144690811634,
-0.00217161956243217,
0.013645119965076447,
0.06862657517194748,
0.026185864582657814,
0.041885118931531906,
-0.008306869305670261,
0.08984754234552383,
0.01272572111338377,
-0.18821746110916138,
-0.22601187229156494,
0.050804588943719864,
0.004395661875605583,
0.028303449973464012,
0.039317306131124496,
-0.016569217666983604,
-0.010414590127766132,
0.34782829880714417,
0.15846307575702667,
-0.0645720362663269,
0.04822789505124092,
0.05901652202010155,
0.034203995019197464,
-0.060911472886800766,
0.06030786409974098,
0.11444578319787979,
0.04677845537662506,
-0.08597499132156372,
-0.0994858518242836,
-0.08623040467500687,
-0.09608090668916702,
-0.08193250745534897,
0.04297539219260216,
0.08429542928934097,
-0.007833137176930904,
-0.10032973438501358,
0.05723411217331886,
-0.03166339918971062,
0.01830175146460533,
-0.0709419995546341,
-0.16106034815311432,
-0.1417982131242752,
-0.06374861299991608,
-0.10108300298452377,
0.030993059277534485,
0.0010837945155799389,
-0.020182523876428604,
0.003980198409408331,
0.009103555232286453,
0.019875414669513702,
-0.03595295548439026,
-0.006604786962270737,
0.14412491023540497,
0.04877692461013794,
-0.0702001079916954,
0.07123563438653946,
0.16712595522403717,
0.044786497950553894,
0.08816548436880112,
0.041771162301301956,
0.1331654191017151,
0.06619472056627274,
0.06946681439876556,
-0.09377437084913254,
0.09829597920179367,
0.04204115271568298,
0.056734051555395126,
0.0958138108253479,
-0.11643508076667786,
0.04977273568511009,
-0.006104299332946539,
-0.03406457602977753,
-0.11960610002279282,
0.12633799016475677,
-0.049270305782556534,
0.10864275693893433,
0.16312415897846222,
-0.020090611651539803,
0.056201010942459106,
-0.05297853425145149,
0.07775328308343887,
0.031978245824575424,
-0.0948048010468483,
-0.0884101390838623,
-0.16981452703475952,
0.01470054592937231,
-0.01913934014737606,
-0.027309797704219818,
-0.22312390804290771,
-0.06489630788564682,
-0.031944260001182556,
-0.0004662737192120403,
-0.01674867793917656,
0.08655881136655807,
0.10971853137016296,
0.04229249060153961,
0.001686146017163992,
-0.22678877413272858,
0.03270381689071655,
0.04271261394023895,
-0.09907051175832748,
-0.07809581607580185
] |
null | null |
transformers
|
# WangchanBERTa base model: `wangchanberta-base-wiki-newmm`
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692).
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reivews`
Users' review rating classification task (scale ranging from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale ranging from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
<br>
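If you just want a quick smoke test outside the notebook, a minimal fill-mask sketch is shown below. It assumes this checkpoint's tokenizer loads through the standard `pipeline`/Auto classes; the Colab notebook above remains authoritative for the exact (word-level) tokenizer class and input formatting.
```python
from transformers import pipeline

# Minimal masked-language-modeling sketch; the model id is this repository's name.
fill_mask = pipeline(
    "fill-mask",
    model="airesearch/wangchanberta-base-wiki-newmm",
    tokenizer="airesearch/wangchanberta-base-wiki-newmm",
)

# Illustrative Thai sentence with one <mask> token ("Thailand's capital is <mask>").
# Note that spaces were encoded as <_> during pretraining (see Preprocessing below).
for prediction in fill_mask("ประเทศไทยมีเมืองหลวงคือ<mask>"):
    print(prediction["token_str"], prediction["score"])
```
<br>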
## Training data
The `wangchanberta-base-wiki-newmm` model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles from 20 August 2020 (dumps.wikimedia.org/thwiki/20200820/). Lists and tables are excluded.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use word-level tokens from [PyThaiNLP](https://github.com/PyThaiNLP/pythainlp)'s dictionary-based tokenizer, namely `newmm`. The total number of word-level tokens in the vocabulary is 97,982.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the boundary of 512 tokens, we split the sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692) (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sampled 15% of the tokens; out of these, 80% are replaced with a `<mask>` token, 10% are left unchanged, and 10% are replaced with a random token.
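This is the standard BERT/RoBERTa masking recipe. As a sketch (not the original pretraining code), it can be reproduced with the Transformers data collator, assuming the checkpoint's tokenizer loads via `AutoTokenizer`:
```python
from transformers import AutoTokenizer, DataCollatorForLanguageModeling

# The collator applies exactly the 80/10/10 split described above when mlm_probability=0.15.
tokenizer = AutoTokenizer.from_pretrained("airesearch/wangchanberta-base-wiki-newmm")
collator = DataCollatorForLanguageModeling(
    tokenizer=tokenizer, mlm=True, mlm_probability=0.15
)

# Illustrative Thai sentence; labels are -100 everywhere except the sampled positions.
example = tokenizer("ตัวอย่างประโยคภาษาไทย", return_special_tokens_mask=True)
batch = collator([example])
print(batch["input_ids"], batch["labels"])
```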
<br>
**Train/Val/Test splits**
We split the corpus sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
**Pretraining**
The model was trained on 32 V100 GPUs for 31,250 steps with a batch size of 8,192 (16 sequences per device with 16 gradient-accumulation steps) and a sequence length of 512 tokens. The optimizer is Adam with a learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1,250 steps and then linearly decayed to zero. The checkpoint with the minimum validation loss is selected as the best model checkpoint.
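As a rough sketch only (not the original pretraining script), the optimizer and schedule described above could be set up as follows; the model id and Auto class are assumptions for illustration.
```python
import torch
from transformers import AutoModelForMaskedLM, get_linear_schedule_with_warmup

# Hyperparameters mirror the paragraph above; in practice you would start from a
# randomly initialized config rather than this already-pretrained checkpoint.
model = AutoModelForMaskedLM.from_pretrained("airesearch/wangchanberta-base-wiki-newmm")
optimizer = torch.optim.Adam(model.parameters(), lr=7e-4, betas=(0.9, 0.98), eps=1e-6)
scheduler = get_linear_schedule_with_warmup(
    optimizer,
    num_warmup_steps=1250,      # linear warmup over the first 1,250 steps
    num_training_steps=31250,   # linear decay to zero by step 31,250
)
```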
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{"language": "th"}
|
fill-mask
|
airesearch/wangchanberta-base-wiki-newmm
|
[
"transformers",
"pytorch",
"jax",
"roberta",
"fill-mask",
"th",
"arxiv:1907.11692",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1907.11692",
"2101.09635"
] |
[
"th"
] |
TAGS
#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# WangchanBERTa base model: 'wangchanberta-base-wiki-newmm'
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at this repository.
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reivews'
Users' review rating classification task (scale is ranging from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this Colab notebook
<br>
## Training data
'wangchanberta-base-wiki-newmm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles from 20 August 2020 (URL). Lists and tables are excluded.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use word-level tokens from PyThaiNLP's dictionary-based tokenizer, namely 'newmm'. The total number of word-level tokens in the vocabulary is 97,982.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the boundary of 512 tokens, we split the sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sampled 15% of the tokens; out of these, 80% are replaced with a '<mask>' token, 10% are left unchanged, and 10% are replaced with a random token.
<br>
Train/Val/Test splits
We split the corpus sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
Pretraining
The model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2= 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint.
<br>
BibTeX entry and citation info
|
[
"# WangchanBERTa base model: 'wangchanberta-base-wiki-newmm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-newmm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use wordl-level token from PyThaiNLP's dictionary-based tokenizer namedly 'newmm'. The total number of word-level tokens in the vocabulary is 97,982. \n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# WangchanBERTa base model: 'wangchanberta-base-wiki-newmm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-newmm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use wordl-level token from PyThaiNLP's dictionary-based tokenizer namedly 'newmm'. The total number of word-level tokens in the vocabulary is 97,982. \n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
59,
55,
35,
312,
28,
51,
470
] |
[
"passage: TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# WangchanBERTa base model: 'wangchanberta-base-wiki-newmm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>"
] |
[
-0.0032038248609751463,
0.0395154245197773,
-0.005416155327111483,
0.024531925097107887,
0.11065678298473358,
0.01704934425652027,
0.14432910084724426,
0.06096592918038368,
0.024648191407322884,
0.11120306700468063,
-0.061923548579216,
0.007323144003748894,
0.09381863474845886,
0.12345745414495468,
0.05201716721057892,
-0.20732362568378448,
0.05628509074449539,
-0.05005006492137909,
0.0198488961905241,
0.080966055393219,
0.08240209519863129,
-0.08620675653219223,
0.08654217422008514,
0.05924321338534355,
-0.06504356116056442,
0.021179523319005966,
-0.01792290061712265,
-0.07837194204330444,
0.05742562562227249,
0.03394105285406113,
0.13074779510498047,
0.011400616727769375,
0.03480686619877815,
-0.14902730286121368,
0.013152956031262875,
0.054511554539203644,
0.022393180057406425,
0.030443906784057617,
0.12175756692886353,
-0.06561855226755142,
0.12066664546728134,
-0.07782816141843796,
0.1105506643652916,
0.06020191311836243,
-0.09526194632053375,
-0.03398609906435013,
-0.11513397842645645,
0.15076637268066406,
0.10886364430189133,
0.04817322641611099,
-0.04614751413464546,
0.0960298553109169,
-0.08108410984277725,
0.037073127925395966,
0.025266824290156364,
-0.255522221326828,
-0.0376422181725502,
0.10059230774641037,
0.0016222784761339426,
0.030802415683865547,
-0.09600890427827835,
0.0017177656991407275,
-0.006079221144318581,
0.021087072789669037,
0.018404342234134674,
-0.05471742898225784,
-0.018258891999721527,
-0.06685761362314224,
-0.09943931549787521,
-0.00027280335780233145,
0.11847595870494843,
0.0252357367426157,
-0.08185776323080063,
-0.14320139586925507,
-0.032731011509895325,
0.031798895448446274,
-0.004932856187224388,
-0.01998465321958065,
-0.020664464682340622,
0.014707271009683609,
0.07189593464136124,
-0.0782131552696228,
-0.09250305593013763,
-0.008927796967327595,
-0.031869202852249146,
0.034899551421403885,
0.029949558898806572,
0.000542004476301372,
-0.026641611009836197,
0.08416910469532013,
-0.0038924964610487223,
-0.03430928662419319,
-0.005658541806042194,
-0.0464160218834877,
-0.1921946406364441,
-0.04190590977668762,
0.037262387573719025,
-0.11634774506092072,
-0.005470073316246271,
0.15384513139724731,
-0.0404578261077404,
0.1114291399717331,
-0.09343299269676208,
0.0050266822800040245,
0.06361730396747589,
0.17071686685085297,
-0.06987042725086212,
-0.027791287750005722,
0.019775284454226494,
-0.03471860662102699,
0.0055534252896904945,
-0.014148421585559845,
-0.002186823869124055,
-0.04496711120009422,
0.06491969525814056,
0.03702371194958687,
0.0022405933123081923,
0.09090328216552734,
-0.05622401833534241,
-0.07218785583972931,
0.2054966390132904,
-0.12341983616352081,
-0.007342010736465454,
0.03988349810242653,
-0.03372514620423317,
0.04124639183282852,
0.057200394570827484,
0.0011245560599491,
-0.06998816877603531,
0.09143627434968948,
-0.08813261240720749,
-0.010942046530544758,
-0.031147943809628487,
-0.08718084543943405,
0.01629948429763317,
-0.09407374262809753,
-0.10758375376462936,
-0.05250198766589165,
-0.11464043706655502,
-0.06150909140706062,
0.006756595801562071,
-0.03271690383553505,
-0.01777530089020729,
0.0005802331725135446,
0.02960255928337574,
-0.00709511199966073,
0.033592384308576584,
-0.04634585604071617,
0.004470687359571457,
0.011423005722463131,
-0.041739095002412796,
0.05909876525402069,
0.026016710326075554,
0.038790613412857056,
-0.09775976836681366,
0.06164815276861191,
-0.3541807234287262,
0.14181050658226013,
-0.083677276968956,
0.05186270922422409,
-0.13708388805389404,
0.005326833110302687,
-0.009651780128479004,
-0.01238291896879673,
-0.02867223136126995,
0.09784238785505295,
-0.01360742561519146,
-0.0409807451069355,
0.16815347969532013,
-0.13453027606010437,
-0.035329144448041916,
0.10910876840353012,
-0.034577302634716034,
0.06917252391576767,
0.10246086120605469,
0.16876494884490967,
0.16293837130069733,
-0.025595514103770256,
-0.04078309237957001,
-0.021503150463104248,
-0.05378299206495285,
0.1119924858212471,
0.06202457845211029,
-0.04131298139691353,
-0.032340820878744125,
0.017420059069991112,
-0.05032331869006157,
-0.0006481524324044585,
0.017809657379984856,
-0.020573554560542107,
0.019342223182320595,
-0.024404441937804222,
0.021656867116689682,
-0.047306761145591736,
0.04276903346180916,
0.025049176067113876,
-0.046262986958026886,
0.05775479972362518,
0.07674483209848404,
-0.05152133107185364,
0.022464117035269737,
-0.13180509209632874,
-0.013139317743480206,
-0.026284383609890938,
-0.004880548920482397,
-0.20367659628391266,
-0.0689917653799057,
0.07595688849687576,
-0.09668011963367462,
0.14633001387119293,
-0.010459201410412788,
-0.009734970517456532,
0.007534679491072893,
-0.047930020838975906,
-0.015030249021947384,
0.02174236997961998,
-0.04776483401656151,
-0.01027179416269064,
-0.1084340289235115,
-0.01713823527097702,
-0.02256682701408863,
0.18951977789402008,
-0.13768872618675232,
0.03573083505034447,
0.0407167449593544,
0.08309044688940048,
0.030426060780882835,
-0.026266828179359436,
0.0919545516371727,
0.014308721758425236,
0.02539738640189171,
-0.033243097364902496,
0.0011872953036800027,
0.018748225644230843,
-0.08280394226312637,
0.08024120330810547,
-0.12565994262695312,
-0.1538250893354416,
0.024158546701073647,
0.051927465945482254,
-0.10913357138633728,
0.05749071016907692,
-0.005171832162886858,
-0.05447816848754883,
-0.11294952780008316,
0.013885676860809326,
0.14738033711910248,
0.03262513503432274,
0.09874895960092545,
-0.049265701323747635,
-0.0030194625724107027,
-0.02966996468603611,
-0.017724452540278435,
0.004139309749007225,
0.08878813683986664,
0.02677408792078495,
-0.29484859108924866,
0.06108880415558815,
-0.05047275498509407,
0.009080180898308754,
0.12468785047531128,
0.023045606911182404,
-0.0309626292437315,
-0.0287343580275774,
0.08004292100667953,
0.005192270502448082,
-0.012316794134676456,
-0.07263489812612534,
-0.005045281257480383,
0.021482447162270546,
0.017579689621925354,
0.006094043143093586,
-0.05381603538990021,
0.051101669669151306,
-0.017026713117957115,
-0.023596804589033127,
-0.027683250606060028,
0.02861536480486393,
0.05804670974612236,
0.06962451338768005,
0.055910829454660416,
0.10886476933956146,
-0.01827000454068184,
-0.020365890115499496,
-0.1421777904033661,
0.13696986436843872,
-0.0996503010392189,
-0.30561089515686035,
-0.14944924414157867,
0.021842313930392265,
-0.06186338886618614,
-0.013715053908526897,
-0.02541249990463257,
-0.043281491845846176,
-0.06715273857116699,
-0.05244405195116997,
0.1220879927277565,
-0.04925200343132019,
-0.07085981965065002,
-0.03856847435235977,
0.007836565375328064,
0.018596310168504715,
-0.07014579325914383,
0.016035089269280434,
0.013142870739102364,
-0.11355331540107727,
0.018822619691491127,
-0.048280078917741776,
0.017210116609930992,
0.13251379132270813,
-0.022346965968608856,
-0.021353833377361298,
-0.02958243153989315,
0.1492815911769867,
-0.08882339298725128,
0.1005108579993248,
0.07216255366802216,
-0.04984022676944733,
0.058410488069057465,
0.13463887572288513,
0.01730174757540226,
-0.022458402439951897,
0.06619364023208618,
0.09443239122629166,
-0.025856783613562584,
-0.29159626364707947,
-0.07117248326539993,
-0.0367034375667572,
0.041029345244169235,
0.06652472913265228,
0.0619254894554615,
0.09393496066331863,
0.041236840188503265,
-0.06672768294811249,
0.07502997666597366,
0.07826586812734604,
0.07142890244722366,
0.0880732387304306,
0.04143930971622467,
0.05904390662908554,
-0.13042345643043518,
-0.023283498361706734,
0.07499537616968155,
0.03357125446200371,
0.16832813620567322,
0.03525616228580475,
0.13075695931911469,
0.08063141256570816,
0.05798793584108353,
0.08347829431295395,
0.028805037960410118,
-0.0380423441529274,
0.029626067727804184,
-0.006933275610208511,
-0.03783385083079338,
-0.0071621970273554325,
0.0287128034979105,
0.09989821165800095,
-0.04885070398449898,
0.05098699405789375,
-0.02729634940624237,
0.0517745316028595,
0.22609159350395203,
0.015497918240725994,
-0.06706191599369049,
-0.061527665704488754,
0.05353051796555519,
-0.05464879423379898,
-0.08116322010755539,
-0.010880201123654842,
0.03689899668097496,
-0.1718396544456482,
0.13972927629947662,
-0.022155992686748505,
0.10689117759466171,
-0.1489582061767578,
-0.049342744052410126,
-0.04089270904660225,
-0.006693333387374878,
-0.007734054699540138,
0.0610499307513237,
-0.117831289768219,
0.10666396468877792,
0.03402777016162872,
0.02703678421676159,
-0.043224938213825226,
0.02441161312162876,
0.012909326702356339,
0.06767633557319641,
0.11256562173366547,
-0.003754175966605544,
-0.029245615005493164,
-0.05962308496236801,
-0.05726198852062225,
-0.020289229229092598,
0.07789891958236694,
-0.09832488000392914,
0.05560113489627838,
-0.012108183465898037,
-0.011472342535853386,
-0.052228767424821854,
-0.05379591882228851,
-0.1670587658882141,
-0.13289213180541992,
0.02864706516265869,
-0.05549450218677521,
-0.03479162976145744,
-0.0029202408622950315,
-0.011646807193756104,
-0.04669436439871788,
0.18001310527324677,
-0.10790667682886124,
-0.06526388972997665,
-0.10543506592512131,
-0.009244734421372414,
0.0785130187869072,
-0.08401406556367874,
0.034746140241622925,
-0.07573292404413223,
0.08036211878061295,
-0.03186824545264244,
-0.06184764206409454,
0.03827514126896858,
-0.01117214560508728,
-0.09885577112436295,
0.014592882245779037,
0.08810152858495712,
0.11999951303005219,
0.03241422399878502,
0.0011064631398767233,
0.018293995410203934,
0.06934516131877899,
-0.11412690579891205,
-0.04905855283141136,
0.11232118308544159,
-0.0406537763774395,
0.12564608454704285,
-0.06544988602399826,
-0.2224806398153305,
-0.10936018824577332,
-0.026622222736477852,
0.08208220452070236,
0.12718933820724487,
-0.0462251752614975,
0.12115161865949631,
0.15198923647403717,
-0.07689370214939117,
-0.18090730905532837,
0.003373696468770504,
0.02381753921508789,
0.030315779149532318,
0.03270258381962776,
-0.1919657438993454,
0.10838843882083893,
0.04012104123830795,
-0.02032456547021866,
-0.06348750740289688,
-0.22062471508979797,
-0.1254892498254776,
0.04394443705677986,
0.02142404578626156,
-0.08951211720705032,
-0.1191050186753273,
-0.08329423516988754,
-0.02592150680720806,
-0.05035390332341194,
0.14439475536346436,
-0.01878734864294529,
0.06398233771324158,
0.026069169864058495,
0.022407878190279007,
0.009575574658811092,
-0.016865616664290428,
0.10045800358057022,
0.034103188663721085,
0.024449095129966736,
-0.04470236971974373,
-0.035545144230127335,
0.16829486191272736,
-0.024636028334498405,
0.14764811098575592,
0.06795402616262436,
0.022979501634836197,
-0.12508822977542877,
-0.060353927314281464,
-0.07117246091365814,
0.02126106433570385,
-0.042060788720846176,
-0.013365527614951134,
-0.10564140975475311,
0.09608807414770126,
0.025662334635853767,
-0.002930444199591875,
0.10852410644292831,
-0.06472273916006088,
0.0635175108909607,
0.0322004072368145,
0.1184685006737709,
0.15860550105571747,
-0.07384306192398071,
-0.06270938366651535,
-0.023623712360858917,
0.039325255900621414,
-0.16835220158100128,
0.06167581304907799,
0.0684511587023735,
0.04104181379079819,
0.1542438417673111,
-0.016788100823760033,
-0.1427585780620575,
0.030167419463396072,
0.07653112709522247,
-0.10067552328109741,
-0.1617375910282135,
0.03229760006070137,
0.04920673370361328,
-0.048612140119075775,
0.021960770711302757,
0.11133939772844315,
-0.02373209409415722,
-0.05477786064147949,
0.05091932788491249,
0.03109748102724552,
-0.04141439497470856,
0.06582920253276825,
0.00938628800213337,
0.04805354028940201,
-0.0672912746667862,
0.15749245882034302,
0.21242810785770416,
-0.07270943373441696,
-0.02744969166815281,
0.16078133881092072,
-0.1342124044895172,
-0.04239736869931221,
-0.09949065744876862,
0.1054663211107254,
0.014095521531999111,
-0.04391096532344818,
0.027094021439552307,
-0.0871172845363617,
0.020961765199899673,
0.172300323843956,
0.009698093868792057,
0.05907607078552246,
-0.05096828565001488,
-0.007953930646181107,
-0.0279091764241457,
0.030015964061021805,
0.03454592451453209,
0.014060474932193756,
-0.09197964519262314,
0.030229395255446434,
0.10809434205293655,
0.04767397791147232,
-0.012098951265215874,
-0.08285143971443176,
-0.09869343787431717,
0.007963458076119423,
-0.015467388555407524,
0.06148383393883705,
-0.10063966363668442,
0.0070332190953195095,
-0.017744680866599083,
-0.03062223270535469,
-0.016238274052739143,
-0.015143584460020065,
-0.033299919217824936,
-0.03751618042588234,
-0.05664776265621185,
0.11652080714702606,
-0.14941498637199402,
-0.014190435409545898,
0.07510443776845932,
-0.07354546338319778,
0.08635464310646057,
0.019357820972800255,
-0.030227793380618095,
0.0346427857875824,
-0.0773366242647171,
0.012126012705266476,
-0.004203920252621174,
0.008941025473177433,
-0.017795896157622337,
-0.137559711933136,
-0.005643426440656185,
-0.04271610826253891,
0.02484528347849846,
0.026902401819825172,
0.03655815124511719,
-0.08002281934022903,
0.045958079397678375,
-0.06055248901247978,
-0.07250422984361649,
-0.06878837943077087,
0.04616524651646614,
0.08301399648189545,
0.027618123218417168,
0.03127731382846832,
-0.055886540561914444,
0.04139842838048935,
-0.10330576449632645,
-0.023597367107868195,
-0.0061956304125487804,
-0.0011545916786417365,
-0.05007927864789963,
-0.05346068739891052,
0.02546541951596737,
0.003713037585839629,
0.0777144581079483,
-0.04191216453909874,
0.012795379385352135,
0.048426296561956406,
0.062174275517463684,
-0.1427350789308548,
0.032627951353788376,
0.043254461139440536,
0.005373495630919933,
-0.0168438833206892,
0.09813565015792847,
-0.039091695100069046,
-0.09305024892091751,
0.04882710054516792,
0.12268775701522827,
0.1388769894838333,
-0.01705714501440525,
0.013354948721826077,
0.04682612791657448,
-0.012563147582113743,
-0.0956137478351593,
0.06300150603055954,
-0.12094978988170624,
-0.012954034842550755,
-0.0721139907836914,
0.052210330963134766,
0.14995881915092468,
-0.10431239753961563,
0.09173668175935745,
0.02797822467982769,
-0.08971898257732391,
-0.08073385804891586,
-0.2036934494972229,
-0.06992046535015106,
0.009544034488499165,
-0.010940240696072578,
-0.07652053982019424,
0.028428493067622185,
0.12691737711429596,
0.06087539345026016,
-0.01628497801721096,
0.17561620473861694,
-0.1175609901547432,
-0.09430471807718277,
0.06972743570804596,
0.02533661760389805,
0.005529910326004028,
0.019413843750953674,
0.05851326510310173,
0.008634398691356182,
0.11912156641483307,
0.06704498082399368,
0.047481466084718704,
0.028436312451958656,
0.046027496457099915,
-0.051294513046741486,
-0.0706002339720726,
0.014400039799511433,
-0.001412444282323122,
-0.010050814598798752,
0.15600337088108063,
0.03999801352620125,
-0.020510336384177208,
-0.03018415905535221,
0.18559163808822632,
-0.01962718740105629,
-0.04624496027827263,
-0.175230011343956,
0.1709425151348114,
-0.011409417726099491,
-0.023881876841187477,
0.04249798133969307,
-0.1028793454170227,
-0.024474605917930603,
0.18218529224395752,
0.13983462750911713,
0.03964175283908844,
0.020625248551368713,
-0.005058272276073694,
0.012002608738839626,
0.02570277266204357,
0.09514062106609344,
-0.03402262553572655,
0.17199842631816864,
-0.04783602058887482,
0.0772828757762909,
-0.05948846787214279,
-0.046587489545345306,
-0.02953401766717434,
0.03386058658361435,
-0.021209077909588814,
0.0357392393052578,
-0.06569479405879974,
0.1382012516260147,
-0.12969589233398438,
-0.24539583921432495,
0.034896090626716614,
-0.06349216401576996,
-0.1064644604921341,
-0.019669990986585617,
-0.018184995278716087,
0.006597247906029224,
0.02464306354522705,
0.029692912474274635,
0.01126973144710064,
0.07873973250389099,
0.0521332286298275,
-0.059139639139175415,
-0.08126101642847061,
0.09211066365242004,
0.019528424367308617,
0.1412615031003952,
0.01943395659327507,
0.03704540804028511,
0.07713545858860016,
0.007009520195424557,
-0.07462134212255478,
0.03220319747924805,
0.013630006462335587,
0.013588584959506989,
-0.027707256376743317,
0.16569414734840393,
0.01254634466022253,
0.12766629457473755,
0.061507806181907654,
-0.11086627095937729,
0.05805405601859093,
-0.012895455583930016,
0.046079229563474655,
-0.14944292604923248,
0.11143175512552261,
-0.11878058314323425,
0.13303320109844208,
0.1671551764011383,
-0.009036022238433361,
-0.006770634092390537,
-0.018990935757756233,
0.001190172042697668,
-0.021364431828260422,
0.045730944722890854,
-0.040447741746902466,
-0.173371821641922,
0.04888929799199104,
-0.12910547852516174,
0.095954030752182,
-0.2284756302833557,
-0.021112021058797836,
0.013485444709658623,
0.010870028287172318,
-0.05548830330371857,
0.1310936063528061,
-0.04432378336787224,
0.025257349014282227,
-0.011536726728081703,
-0.2930542826652527,
0.018868668004870415,
0.08484921604394913,
-0.1043040081858635,
-0.04567519575357437
] |
null | null |
transformers
|
# WangchanBERTa base model: `wangchanberta-base-wiki-sefr`
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692).
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we provide finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reivews`
Users' review rating classification task (scale ranges from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale ranges from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
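In addition to the notebook, the snippet below is a minimal sketch of masked-token prediction with the Hugging Face `pipeline` API. Note that the SEFR variant normally relies on a custom word-level tokenizer from the thai2transformers repository, so loading the tokenizer with the generic call below is an assumption; the Colab notebook above remains the authoritative reference.
```python
from transformers import pipeline

# Minimal fill-mask sketch. Loading the tokenizer generically is an assumption:
# the SEFR checkpoint is normally paired with a custom tokenizer class from
# the thai2transformers repository.
fill_mask = pipeline(
    task="fill-mask",
    model="airesearch/wangchanberta-base-wiki-sefr",
    tokenizer="airesearch/wangchanberta-base-wiki-sefr",
)

# Predict the masked token in a Thai sentence.
print(fill_mask("ประเทศไทยมีเมืองหลวงคือ<mask>"))
```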
<br>
## Training data
The `wangchanberta-base-wiki-sefr` model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (dumps.wikimedia.org/thwiki/20200820/). We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
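A rough Python sketch of the three rules above (the exact patterns used in the original pipeline live in the thai2transformers repository, so the regular expressions here are assumptions):
```python
import re

def preprocess(text: str) -> str:
    # Rule 1: replace non-breaking space, zero-width non-breaking space and soft hyphen with spaces.
    text = re.sub(r"[\u00a0\ufeff\u00ad]", " ", text)
    # Rule 2: remove an empty parenthesis right after the first paragraph's title
    # (the precise upstream pattern is not documented here; this is a guess).
    text = re.sub(r"\s*\(\s*\)", "", text, count=1)
    # Rule 3: replace spaces with the <_> placeholder.
    return text.replace(" ", "<_>")

print(preprocess("กรุงเทพมหานคร ( ) เป็นเมืองหลวง\u00a0ของประเทศไทย"))
```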
<br>
Regarding the vocabulary, we use the Stacked Ensemble Filter and Refine (SEFR) tokenizer (`engine="best"`) [[Limkonchotiwat et al., 2020]](https://www.aclweb.org/anthology/2020.emnlp-main.315/), based on probabilities from the CNN-based `deepcut` tokenizer [[Kittinaradorn et al., 2019]](http://doi.org/10.5281/zenodo.3457707). The total number of word-level tokens in the vocabulary is 92,177.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split such a sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692) (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with the <mask> token. Out of the 15%, 80% are replaced with a <mask> token, 10% are left unchanged, and 10% are replaced with a random token.
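As an illustration, the 15% / 80-10-10 scheme described above can be sketched as follows; this is a simplified version of the dynamic masking used by RoBERTa-style data collators, with special-token handling omitted:
```python
import torch

def mask_tokens(input_ids: torch.Tensor, mask_token_id: int, vocab_size: int,
                mlm_probability: float = 0.15):
    """Sample 15% of positions; of those, 80% -> <mask>, 10% -> random token, 10% unchanged."""
    labels = input_ids.clone()
    selected = torch.bernoulli(torch.full(labels.shape, mlm_probability)).bool()
    labels[~selected] = -100  # only the sampled positions contribute to the MLM loss
    # 80% of the sampled positions become the <mask> token.
    masked = torch.bernoulli(torch.full(labels.shape, 0.8)).bool() & selected
    input_ids[masked] = mask_token_id
    # 10% of the sampled positions become a random token.
    randomized = torch.bernoulli(torch.full(labels.shape, 0.5)).bool() & selected & ~masked
    input_ids[randomized] = torch.randint(vocab_size, labels.shape, dtype=torch.long)[randomized]
    # The remaining 10% are left unchanged.
    return input_ids, labels
```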
<br>
**Train/Val/Test splits**
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
**Pretraining**
The model was trained on 32 V100 GPUs for 31,250 steps with a batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with a learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1,250 steps and then linearly decayed to zero. The model checkpoint with the minimum validation loss is selected as the best model checkpoint.
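A minimal sketch of that optimisation setup with PyTorch and `transformers` (the tiny stand-in model is only there to make the snippet self-contained):
```python
import torch
from transformers import get_linear_schedule_with_warmup

model = torch.nn.Linear(8, 8)  # stand-in for the RoBERTa model being pretrained

# Adam with the hyperparameters reported above.
optimizer = torch.optim.Adam(model.parameters(), lr=7e-4, betas=(0.9, 0.98), eps=1e-6)

# 1,250 warm-up steps, then linear decay to zero over the 31,250 total steps.
scheduler = get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=1_250, num_training_steps=31_250
)
```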
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{"language": "th"}
|
fill-mask
|
airesearch/wangchanberta-base-wiki-sefr
|
[
"transformers",
"pytorch",
"jax",
"roberta",
"fill-mask",
"th",
"arxiv:1907.11692",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1907.11692",
"2101.09635"
] |
[
"th"
] |
TAGS
#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# WangchanBERTa base model: 'wangchanberta-base-wiki-sefr'
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at this repository.
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we provide finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reivews'
Users' review rating classification task (scale ranges from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale ranges from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this Colab notebook
<br>
## Training data
The 'wangchanberta-base-wiki-sefr' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use the Stacked Ensemble Filter and Refine (SEFR) tokenizer ('engine="best"') [[Limkonchotiwat et al., 2020]](URL based on probabilities from the CNN-based 'deepcut' tokenizer [[Kittinaradorn et al., 2019]](URL The total number of word-level tokens in the vocabulary is 92,177.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split such a sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with the <mask> token. Out of the 15%, 80% are replaced with a <mask> token, 10% are left unchanged, and 10% are replaced with a random token.
<br>
Train/Val/Test splits
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
Pretraining
The model was trained on 32 V100 GPUs for 31,250 steps with a batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with a learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1,250 steps and then linearly decayed to zero. The model checkpoint with the minimum validation loss is selected as the best model checkpoint.
<br>
BibTeX entry and citation info
|
[
"# WangchanBERTa base model: 'wangchanberta-base-wiki-sefr'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-sefr' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use Stacked Ensemble Filter and Refine (SEFR) tokenizer '(engine=\"best\") '[[Limkonchotiwat et al., 2020]](URL based on probablities from CNN-based 'deepcut' [[Kittinaradorn et al., 2019]](URL The total number of word-level tokens in the vocabulary is 92,177.\n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# WangchanBERTa base model: 'wangchanberta-base-wiki-sefr'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-sefr' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use Stacked Ensemble Filter and Refine (SEFR) tokenizer '(engine=\"best\") '[[Limkonchotiwat et al., 2020]](URL based on probablities from CNN-based 'deepcut' [[Kittinaradorn et al., 2019]](URL The total number of word-level tokens in the vocabulary is 92,177.\n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
59,
55,
35,
312,
28,
51,
504
] |
[
"passage: TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# WangchanBERTa base model: 'wangchanberta-base-wiki-sefr'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>"
] |
[
-0.0019902463536709547,
0.038917187601327896,
-0.005449367221444845,
0.025089019909501076,
0.11160674691200256,
0.016435597091913223,
0.14528773725032806,
0.060071442276239395,
0.02549796737730503,
0.11118301004171371,
-0.06103654205799103,
0.007739630527794361,
0.09399977326393127,
0.12532596290111542,
0.050759993493556976,
-0.20713506639003754,
0.055975012481212616,
-0.05093207582831383,
0.020546158775687218,
0.08091376721858978,
0.08315064758062363,
-0.08523494750261307,
0.08769230544567108,
0.05997198075056076,
-0.06607501953840256,
0.02262088470160961,
-0.017274007201194763,
-0.07825280725955963,
0.05776185542345047,
0.0352763757109642,
0.13158145546913147,
0.011293595656752586,
0.03619169816374779,
-0.1496305614709854,
0.0131306741386652,
0.05472517013549805,
0.021949153393507004,
0.031224796548485756,
0.12186376750469208,
-0.06754038482904434,
0.11945224553346634,
-0.07643584907054901,
0.11049000918865204,
0.060034073889255524,
-0.09525231271982193,
-0.03258393704891205,
-0.11396835744380951,
0.15101325511932373,
0.10836286842823029,
0.048297058790922165,
-0.04626350477337837,
0.09433286637067795,
-0.08125587552785873,
0.037709664553403854,
0.025616182014346123,
-0.25164443254470825,
-0.0376807376742363,
0.0989554151892662,
-0.0007601910037919879,
0.030130494385957718,
-0.09662670642137527,
0.00012043581227771938,
-0.0051392680034041405,
0.020327232778072357,
0.017280831933021545,
-0.05582163482904434,
-0.02426922507584095,
-0.0679018571972847,
-0.09883812814950943,
-0.00008692195842741057,
0.11854250729084015,
0.025215994566679,
-0.08280403167009354,
-0.14223159849643707,
-0.034018535166978836,
0.03218720853328705,
-0.0037727952003479004,
-0.01960710808634758,
-0.019755464047193527,
0.014637785032391548,
0.0737125352025032,
-0.07903137803077698,
-0.09335087239742279,
-0.009868253022432327,
-0.03139307349920273,
0.03588620200753212,
0.03034466691315174,
0.0018365540308877826,
-0.02635899931192398,
0.0843079686164856,
-0.005770983174443245,
-0.03392994776368141,
-0.004882324021309614,
-0.04673168435692787,
-0.19377419352531433,
-0.041038282215595245,
0.03680859133601189,
-0.11497914046049118,
-0.006040411535650492,
0.1568470001220703,
-0.042916156351566315,
0.11165774613618851,
-0.09358201175928116,
0.005007511004805565,
0.06408540904521942,
0.1698717176914215,
-0.06980980932712555,
-0.026129286736249924,
0.019382119178771973,
-0.035727761685848236,
0.0048409728333354,
-0.014025221578776836,
-0.0027705600950866938,
-0.04496011510491371,
0.06254983693361282,
0.0348290354013443,
0.00009451962978346273,
0.0900057852268219,
-0.05638381838798523,
-0.07080517709255219,
0.20596736669540405,
-0.12320336699485779,
-0.008324493654072285,
0.040115002542734146,
-0.03452261909842491,
0.04155396297574043,
0.057759255170822144,
0.0015187660465016961,
-0.06949780136346817,
0.09095258265733719,
-0.08744367212057114,
-0.010017468594014645,
-0.030761677771806717,
-0.08740469068288803,
0.01506409328430891,
-0.09717501699924469,
-0.10579679906368256,
-0.052678946405649185,
-0.11475725471973419,
-0.0617731474339962,
0.007413723040372133,
-0.03407474234700203,
-0.017625877633690834,
0.0007582852267660201,
0.029569486156105995,
-0.007234251592308283,
0.03384054824709892,
-0.046994779258966446,
0.005139749031513929,
0.010958079248666763,
-0.040020231157541275,
0.059242721647024155,
0.024235010147094727,
0.03866388648748398,
-0.09878726303577423,
0.060660477727651596,
-0.3537618815898895,
0.14257118105888367,
-0.08346422016620636,
0.0504181869328022,
-0.13695873320102692,
0.005521693266928196,
-0.010075921192765236,
-0.012178115546703339,
-0.02881859615445137,
0.09795470535755157,
-0.012415020726621151,
-0.03994638845324516,
0.16857855021953583,
-0.1353614628314972,
-0.03259105980396271,
0.10979069769382477,
-0.0348273329436779,
0.06906411051750183,
0.10098791122436523,
0.17029695212841034,
0.16285757720470428,
-0.026412954553961754,
-0.04008292406797409,
-0.018315311521291733,
-0.05449061840772629,
0.11235073953866959,
0.06283766776323318,
-0.041337888687849045,
-0.031072283163666725,
0.017369689419865608,
-0.051214370876550674,
0.0000740290925023146,
0.01764518953859806,
-0.019736047834157944,
0.019571231678128242,
-0.024035360664129257,
0.020361553877592087,
-0.046719085425138474,
0.043154824525117874,
0.02628193236887455,
-0.045626018196344376,
0.05858723074197769,
0.07724349945783615,
-0.05151068791747093,
0.02251799777150154,
-0.13180148601531982,
-0.013111187145113945,
-0.026435909792780876,
-0.005565466359257698,
-0.2048872709274292,
-0.06664429605007172,
0.07538141310214996,
-0.09570355713367462,
0.14717797935009003,
-0.008444842882454395,
-0.009840017184615135,
0.007579033728688955,
-0.04661775007843971,
-0.014852960593998432,
0.02177446149289608,
-0.04817631468176842,
-0.009639744646847248,
-0.10927100479602814,
-0.016675034537911415,
-0.02278679423034191,
0.19122426211833954,
-0.13912908732891083,
0.03580648824572563,
0.0391588918864727,
0.0833207294344902,
0.030184948816895485,
-0.026249416172504425,
0.09135327488183975,
0.013488628901541233,
0.02487139217555523,
-0.03277641907334328,
0.000051951457862742245,
0.01863175816833973,
-0.08093097060918808,
0.08114100992679596,
-0.12374943494796753,
-0.1540505439043045,
0.023504028096795082,
0.0517088957130909,
-0.11093630641698837,
0.05881437286734581,
-0.005585910752415657,
-0.054040178656578064,
-0.11344829201698303,
0.014376084320247173,
0.1491779088973999,
0.032160669565200806,
0.09959883987903595,
-0.04904543608427048,
-0.0023539073299616575,
-0.028868606314063072,
-0.020048122853040695,
0.004098718985915184,
0.0897238478064537,
0.026327263563871384,
-0.29578763246536255,
0.062126774340867996,
-0.05411361902952194,
0.007939951494336128,
0.12480952590703964,
0.0235768910497427,
-0.03178266063332558,
-0.0277597364038229,
0.08076146245002747,
0.003815824631601572,
-0.012050924822688103,
-0.07145486772060394,
-0.005506742279976606,
0.0210746880620718,
0.016512254253029823,
0.005974277388304472,
-0.054078202694654465,
0.05190712958574295,
-0.017080919817090034,
-0.023641763255000114,
-0.029077215120196342,
0.02864154428243637,
0.05808448791503906,
0.06970671564340591,
0.055331457406282425,
0.10641255229711533,
-0.018785852938890457,
-0.020710568875074387,
-0.1420658677816391,
0.13685137033462524,
-0.0989442691206932,
-0.3046509921550751,
-0.1506856232881546,
0.02097467891871929,
-0.06066429615020752,
-0.012884905561804771,
-0.02580755576491356,
-0.043387919664382935,
-0.0672667920589447,
-0.052025776356458664,
0.12109030783176422,
-0.05045508220791817,
-0.07164935022592545,
-0.03874434903264046,
0.0076017542742192745,
0.019052140414714813,
-0.06890546530485153,
0.016460172832012177,
0.011815004050731659,
-0.11302643269300461,
0.018942832946777344,
-0.048898760229349136,
0.016490597277879715,
0.13077278435230255,
-0.023270029574632645,
-0.021243330091238022,
-0.02976011112332344,
0.14992061257362366,
-0.08859105408191681,
0.09924934059381485,
0.07334302365779877,
-0.05137104541063309,
0.05838160216808319,
0.13549266755580902,
0.016749653965234756,
-0.02232806570827961,
0.06613390892744064,
0.09514596313238144,
-0.026123030111193657,
-0.2915668487548828,
-0.06968966126441956,
-0.03672226890921593,
0.04041329398751259,
0.06637345254421234,
0.061363495886325836,
0.0933026373386383,
0.04206135869026184,
-0.06584154069423676,
0.07561156153678894,
0.07854661345481873,
0.07055553048849106,
0.08730976283550262,
0.04077126458287239,
0.05871342867612839,
-0.13037541508674622,
-0.024462228640913963,
0.07501349598169327,
0.03412250801920891,
0.16807276010513306,
0.03447704762220383,
0.12964856624603271,
0.08036503940820694,
0.05702819302678108,
0.08327134698629379,
0.02827763929963112,
-0.038424890488386154,
0.029692986980080605,
-0.007155046332627535,
-0.03607087954878807,
-0.0076296087354421616,
0.029238281771540642,
0.09952478855848312,
-0.04892650246620178,
0.051815908402204514,
-0.02565760724246502,
0.051554691046476364,
0.2257227897644043,
0.015633152797818184,
-0.06736216694116592,
-0.061169300228357315,
0.05319928377866745,
-0.05592792481184006,
-0.08094754815101624,
-0.010878993198275566,
0.03887181356549263,
-0.17208336293697357,
0.13858379423618317,
-0.02202204428613186,
0.10651382058858871,
-0.14922502636909485,
-0.04972749203443527,
-0.04122724011540413,
-0.005954013671725988,
-0.0076241386123001575,
0.060908522456884384,
-0.11854023486375809,
0.10663264989852905,
0.033295925706624985,
0.026126280426979065,
-0.042559538036584854,
0.025096554309129715,
0.012060428969562054,
0.06738022714853287,
0.11469817906618118,
-0.004022460430860519,
-0.02769763581454754,
-0.06097682937979698,
-0.057100843638181686,
-0.020573046058416367,
0.07868830114603043,
-0.09864707291126251,
0.054292093962430954,
-0.013135514222085476,
-0.010932120494544506,
-0.05224370211362839,
-0.05407445505261421,
-0.16549032926559448,
-0.13357838988304138,
0.027059225365519524,
-0.05601833388209343,
-0.035854969173669815,
-0.002248488599434495,
-0.010678497143089771,
-0.04644394665956497,
0.17873111367225647,
-0.10907258093357086,
-0.06499467045068741,
-0.1059933453798294,
-0.007492806296795607,
0.07859494537115097,
-0.08432089537382126,
0.034536369144916534,
-0.07646121829748154,
0.07992352545261383,
-0.03172871097922325,
-0.06161089241504669,
0.03750376030802727,
-0.011960441246628761,
-0.09858252853155136,
0.014693230390548706,
0.08714252710342407,
0.12100044637918472,
0.03179627284407616,
0.00021684824605472386,
0.018050502985715866,
0.0685284361243248,
-0.11399712413549423,
-0.049072377383708954,
0.11005395650863647,
-0.04192008078098297,
0.1270052194595337,
-0.06397101283073425,
-0.2232924848794937,
-0.10871248692274094,
-0.02607421763241291,
0.08086888492107391,
0.12466204911470413,
-0.04624935984611511,
0.12075672298669815,
0.15311329066753387,
-0.07594854384660721,
-0.18034176528453827,
0.003547385334968567,
0.025709914043545723,
0.030168412253260612,
0.032566770911216736,
-0.19246713817119598,
0.10749475657939911,
0.03980417549610138,
-0.019902711734175682,
-0.06495334953069687,
-0.21876156330108643,
-0.12444043904542923,
0.04656381532549858,
0.021402789279818535,
-0.08923176676034927,
-0.11860661208629608,
-0.08306179195642471,
-0.02582998387515545,
-0.050206542015075684,
0.14541678130626678,
-0.02021818421781063,
0.06388363987207413,
0.026389218866825104,
0.022223595529794693,
0.009420990943908691,
-0.016351597383618355,
0.0996948853135109,
0.034137628972530365,
0.0249149389564991,
-0.043726321309804916,
-0.03598269820213318,
0.16657017171382904,
-0.0234663262963295,
0.14820745587348938,
0.06941009312868118,
0.023274464532732964,
-0.12470506876707077,
-0.060889605432748795,
-0.07056207209825516,
0.021178115159273148,
-0.04137514531612396,
-0.013369222171604633,
-0.10515151172876358,
0.09586766362190247,
0.024309324100613594,
-0.0028791737277060747,
0.10936242341995239,
-0.0659821406006813,
0.06150983273983002,
0.03454074636101723,
0.11713887006044388,
0.16291849315166473,
-0.07337336987257004,
-0.062245577573776245,
-0.023092111572623253,
0.03914050757884979,
-0.16840460896492004,
0.06097076088190079,
0.06749385595321655,
0.041240911930799484,
0.15574529767036438,
-0.017318880185484886,
-0.1431163102388382,
0.030981682240962982,
0.07574167102575302,
-0.10026725381612778,
-0.16094452142715454,
0.03280472010374069,
0.0442657545208931,
-0.04831681028008461,
0.02184169925749302,
0.11185947060585022,
-0.024391504004597664,
-0.05436146631836891,
0.050302013754844666,
0.03127220645546913,
-0.04197441413998604,
0.06564683467149734,
0.010253332555294037,
0.047340862452983856,
-0.06772898137569427,
0.15758369863033295,
0.21196842193603516,
-0.0700436383485794,
-0.027605894953012466,
0.1592215597629547,
-0.13448937237262726,
-0.04189243167638779,
-0.10173816978931427,
0.10357875376939774,
0.012884638272225857,
-0.045355889946222305,
0.02677827700972557,
-0.08733543753623962,
0.019863927736878395,
0.17189353704452515,
0.010418474674224854,
0.0580812506377697,
-0.05133241042494774,
-0.007831425406038761,
-0.028828436508774757,
0.02911374345421791,
0.03416374325752258,
0.013899249024689198,
-0.09203455597162247,
0.02885252796113491,
0.10847997665405273,
0.04832327365875244,
-0.011935041286051273,
-0.08202286064624786,
-0.09954844415187836,
0.008348752744495869,
-0.013130939565598965,
0.06269706785678864,
-0.09948550164699554,
0.007098850328475237,
-0.01679130271077156,
-0.03110051527619362,
-0.015756746754050255,
-0.01456936914473772,
-0.0327245257794857,
-0.03610280156135559,
-0.05522780120372772,
0.1165882870554924,
-0.15004169940948486,
-0.013761614449322224,
0.0751660019159317,
-0.0733872726559639,
0.08595703542232513,
0.02132713608443737,
-0.030136266723275185,
0.03429597243666649,
-0.08157827705144882,
0.01270477008074522,
-0.0034716518130153418,
0.008839822374284267,
-0.01750846393406391,
-0.13652749359607697,
-0.004687616601586342,
-0.043224357068538666,
0.02505425177514553,
0.025464575737714767,
0.03741993010044098,
-0.07978299260139465,
0.04711253568530083,
-0.061483703553676605,
-0.0729781910777092,
-0.06911966949701309,
0.046903692185878754,
0.0826137363910675,
0.029087429866194725,
0.030390499159693718,
-0.05579764023423195,
0.04218113049864769,
-0.10185296088457108,
-0.022942326962947845,
-0.005039137788116932,
-0.00039637641748413444,
-0.051322974264621735,
-0.05430928245186806,
0.024648800492286682,
0.003563201054930687,
0.07888409495353699,
-0.04037798196077347,
0.014186685904860497,
0.04785159230232239,
0.06259483844041824,
-0.14191488921642303,
0.03261267766356468,
0.038506247103214264,
0.0052633206360042095,
-0.016900362446904182,
0.09688809514045715,
-0.03955044969916344,
-0.09239718317985535,
0.04673381894826889,
0.12243805825710297,
0.13959935307502747,
-0.019737808033823967,
0.013026307336986065,
0.04717601090669632,
-0.01413894072175026,
-0.09340593963861465,
0.06303790956735611,
-0.12183699011802673,
-0.01456400379538536,
-0.07253776490688324,
0.05185282602906227,
0.15007877349853516,
-0.1035812571644783,
0.09167846292257309,
0.028606945648789406,
-0.08962274342775345,
-0.0817687138915062,
-0.2023337185382843,
-0.06945968419313431,
0.009976815432310104,
-0.010132457129657269,
-0.0760548934340477,
0.029628634452819824,
0.12885016202926636,
0.060730017721652985,
-0.015528852120041847,
0.17506448924541473,
-0.11691482365131378,
-0.09498825669288635,
0.06851377338171005,
0.025491071864962578,
0.005815028212964535,
0.01850636675953865,
0.05825190991163254,
0.009396015666425228,
0.1186358705163002,
0.06744743138551712,
0.04627256095409393,
0.02978089079260826,
0.04584704712033272,
-0.05104247108101845,
-0.06958014518022537,
0.014271793887019157,
-0.00044327686191536486,
-0.01089959405362606,
0.15671215951442719,
0.040033236145973206,
-0.01976224035024643,
-0.030592622235417366,
0.18578550219535828,
-0.019874652847647667,
-0.04486460983753204,
-0.17683841288089752,
0.17026959359645844,
-0.011057213880121708,
-0.022877627983689308,
0.042485445737838745,
-0.10365355759859085,
-0.025111693888902664,
0.1820993274450302,
0.13946665823459625,
0.03947797045111656,
0.020985357463359833,
-0.004026366397738457,
0.012042108923196793,
0.026888806372880936,
0.0935586616396904,
-0.03440430015325546,
0.1734902560710907,
-0.04811569303274155,
0.07748356461524963,
-0.05843082070350647,
-0.0461549386382103,
-0.027310645207762718,
0.03502821922302246,
-0.02072230353951454,
0.03568680211901665,
-0.06588692963123322,
0.13708311319351196,
-0.13038577139377594,
-0.2490813285112381,
0.03708214312791824,
-0.062247976660728455,
-0.10469039529561996,
-0.020130496472120285,
-0.02060474082827568,
0.007461546920239925,
0.02430650033056736,
0.02928752824664116,
0.010337810032069683,
0.07972373068332672,
0.05215010419487953,
-0.05975251644849777,
-0.08043240010738373,
0.09104550629854202,
0.017647868022322655,
0.1403128206729889,
0.018957749009132385,
0.038456130772829056,
0.07616768777370453,
0.00722442427650094,
-0.073493093252182,
0.03255048021674156,
0.012592324987053871,
0.013780524954199791,
-0.028672348707914352,
0.1655324548482895,
0.012816915288567543,
0.127691850066185,
0.06047425419092178,
-0.11229656636714935,
0.058564409613609314,
-0.012607919052243233,
0.045843254774808884,
-0.1496419906616211,
0.11098412424325943,
-0.12025467306375504,
0.1323038786649704,
0.1681523472070694,
-0.008367706090211868,
-0.006510627921670675,
-0.019115407019853592,
0.0008513148059137166,
-0.020488504320383072,
0.04615945369005203,
-0.039818160235881805,
-0.17241542041301727,
0.048881594091653824,
-0.12822183966636658,
0.09640967100858688,
-0.22861818969249725,
-0.01988135650753975,
0.012569054029881954,
0.01054191030561924,
-0.054355643689632416,
0.1294509768486023,
-0.044714320451021194,
0.027096860110759735,
-0.011155776679515839,
-0.29158148169517517,
0.019265536218881607,
0.08479111641645432,
-0.10263243317604065,
-0.045524243265390396
] |
null | null |
transformers
|
# WangchanBERTa base model: `wangchanberta-base-wiki-spm`
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692).
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we provide finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reivews`
Users' review rating classification task (scale ranges from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale ranges from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
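As a complement to the notebook, here is a minimal masked-language-modeling sketch using the `transformers` auto classes; treat it as an illustration rather than the official usage example.
```python
import torch
from transformers import AutoTokenizer, AutoModelForMaskedLM

name = "airesearch/wangchanberta-base-wiki-spm"
tokenizer = AutoTokenizer.from_pretrained(name)
model = AutoModelForMaskedLM.from_pretrained(name)

# Predict the masked token at the end of a Thai sentence.
text = "ประเทศไทยมีเมืองหลวงคือ" + tokenizer.mask_token
inputs = tokenizer(text, return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits

# Top-5 candidate tokens for the masked position.
mask_pos = (inputs.input_ids == tokenizer.mask_token_id).nonzero(as_tuple=True)[1]
top5 = logits[0, mask_pos[0]].topk(5).indices
print(tokenizer.convert_ids_to_tokens(top5.tolist()))
```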
<br>
## Training data
The `wangchanberta-base-wiki-spm` model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (dumps.wikimedia.org/thwiki/20200820/). We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use subword tokens trained with the [SentencePiece](https://github.com/google/sentencepiece) library on the training set of the Thai Wikipedia corpus. The total number of subword tokens is 24,000.
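For illustration, a 24,000-piece vocabulary of this kind could be trained with the SentencePiece Python API roughly as follows; the file names, character coverage, and the extra `<_>` symbol are assumptions rather than the exact settings used for this checkpoint.
```python
import sentencepiece as spm

spm.SentencePieceTrainer.train(
    input="thwiki_train.txt",      # hypothetical path to the Thai Wikipedia training split
    model_prefix="thwiki_spm",     # writes thwiki_spm.model and thwiki_spm.vocab
    vocab_size=24_000,
    character_coverage=0.9995,
    user_defined_symbols=["<_>"],  # keep the space placeholder from preprocessing as one piece
)
```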
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split such a sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692) (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with the <mask> token. Out of the 15%, 80% are replaced with a <mask> token, 10% are left unchanged, and 10% are replaced with a random token.
<br>
**Train/Val/Test splits**
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
**Pretraining**
The model was trained on 32 V100 GPUs for 31,250 steps with a batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with a learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1,250 steps and then linearly decayed to zero. The model checkpoint with the minimum validation loss is selected as the best model checkpoint.
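For reference, the global batch size quoted above follows directly from the per-device settings: 16 sequences per device × 16 accumulation steps × 32 GPUs = 8,192 sequences per optimizer step.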
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{"language": "th"}
|
fill-mask
|
airesearch/wangchanberta-base-wiki-spm
|
[
"transformers",
"pytorch",
"jax",
"roberta",
"fill-mask",
"th",
"arxiv:1907.11692",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1907.11692",
"2101.09635"
] |
[
"th"
] |
TAGS
#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# WangchanBERTa base model: 'wangchanberta-base-wiki-spm'
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at this repository.
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we provide finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reivews'
Users' review rating classification task (scale ranges from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale ranges from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this Colab notebook
<br>
## Training data
The 'wangchanberta-base-wiki-spm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use subword tokens trained with the SentencePiece library on the training set of the Thai Wikipedia corpus. The total number of subword tokens is 24,000.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split such a sentence and insert an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with the <mask> token. Out of the 15%, 80% are replaced with a <mask> token, 10% are left unchanged, and 10% are replaced with a random token.
<br>
Train/Val/Test splits
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
Pretraining
The model was trained on 32 V100 GPUs for 31,250 steps with a batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with a learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2 = 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1,250 steps and then linearly decayed to zero. The model checkpoint with the minimum validation loss is selected as the best model checkpoint.
<br>
BibTeX entry and citation info
|
[
"# WangchanBERTa base model: 'wangchanberta-base-wiki-spm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-spm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use subword token trained with SentencePice library on the training set of Thai Wikipedia corpus. The total number of subword tokens is 24,000. \n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# WangchanBERTa base model: 'wangchanberta-base-wiki-spm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-spm' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use subword token trained with SentencePice library on the training set of Thai Wikipedia corpus. The total number of subword tokens is 24,000. \n\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
59,
55,
35,
312,
28,
51,
456
] |
[
"passage: TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# WangchanBERTa base model: 'wangchanberta-base-wiki-spm'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>"
] |
[
-0.002432781271636486,
0.0388580858707428,
-0.005465319845825434,
0.02422945387661457,
0.11176861822605133,
0.016575314104557037,
0.14456512033939362,
0.06088431552052498,
0.02562941238284111,
0.1113348975777626,
-0.06156223639845848,
0.009211019612848759,
0.09428925812244415,
0.12436269223690033,
0.05130400136113167,
-0.20758630335330963,
0.0561528205871582,
-0.05064935237169266,
0.020189177244901657,
0.08171050995588303,
0.08263735473155975,
-0.0858171358704567,
0.08752664923667908,
0.05931726470589638,
-0.06434696912765503,
0.02157776989042759,
-0.01740049198269844,
-0.07836590707302094,
0.05765003710985184,
0.03417077288031578,
0.13142216205596924,
0.011180819943547249,
0.03581084683537483,
-0.14948004484176636,
0.013124417513608932,
0.05445173382759094,
0.022427931427955627,
0.03119599260389805,
0.12189175933599472,
-0.06658590584993362,
0.11901094019412994,
-0.07706614583730698,
0.11062440276145935,
0.06026074290275574,
-0.09531218558549881,
-0.033311352133750916,
-0.11438308656215668,
0.14981795847415924,
0.10931228846311569,
0.04772539436817169,
-0.046797338873147964,
0.09636201709508896,
-0.08145943284034729,
0.037364985793828964,
0.026617566123604774,
-0.2536850571632385,
-0.03770781308412552,
0.09938135743141174,
0.00019507590332068503,
0.031163258478045464,
-0.09550420939922333,
0.0009934910340234637,
-0.006157504394650459,
0.020671522244811058,
0.016756543889641762,
-0.05513102188706398,
-0.020402850583195686,
-0.06653732061386108,
-0.09912525862455368,
-0.000891237985342741,
0.11840580403804779,
0.024864396080374718,
-0.08224142342805862,
-0.14379900693893433,
-0.033922143280506134,
0.03105916827917099,
-0.0040940833278000355,
-0.019590463489294052,
-0.019707849249243736,
0.015022716484963894,
0.07180792093276978,
-0.07715325057506561,
-0.0928366556763649,
-0.00953724980354309,
-0.030752796679735184,
0.0344097800552845,
0.03035564161837101,
0.0011441014939919114,
-0.02693701907992363,
0.08473112434148788,
-0.002627914072945714,
-0.03417350351810455,
-0.004772588144987822,
-0.04682890698313713,
-0.19276981055736542,
-0.04072067514061928,
0.03753792867064476,
-0.11588884145021439,
-0.005391092970967293,
0.15534380078315735,
-0.04015222564339638,
0.11198769509792328,
-0.0930492952466011,
0.0050007314421236515,
0.06446580588817596,
0.17064154148101807,
-0.07010617107152939,
-0.026206713169813156,
0.01897016540169716,
-0.03517685830593109,
0.005336333531886339,
-0.01396103110164404,
-0.0018009282648563385,
-0.04423652961850166,
0.06359206140041351,
0.03609735518693924,
0.0016843926860019565,
0.0907350555062294,
-0.05713437870144844,
-0.0711648166179657,
0.20491740107536316,
-0.12322277575731277,
-0.008417925797402859,
0.039769794791936874,
-0.03350653871893883,
0.04149608314037323,
0.05717528238892555,
0.001393771031871438,
-0.07000932097434998,
0.09163025766611099,
-0.08756542950868607,
-0.010568141005933285,
-0.031170949339866638,
-0.08662699162960052,
0.015468337573111057,
-0.09536147862672806,
-0.10695162415504456,
-0.05233597382903099,
-0.11482233554124832,
-0.061426881700754166,
0.006791973952203989,
-0.033276863396167755,
-0.017592914402484894,
0.0006825926247984171,
0.02999979443848133,
-0.007321631535887718,
0.0331503301858902,
-0.04585978388786316,
0.004741207230836153,
0.01134053710848093,
-0.04116101935505867,
0.059269119054079056,
0.026912136003375053,
0.038730472326278687,
-0.09858060628175735,
0.06114925444126129,
-0.35438594222068787,
0.14267542958259583,
-0.0831848680973053,
0.05180908739566803,
-0.136858731508255,
0.005460123065859079,
-0.009424404241144657,
-0.012119323946535587,
-0.028950724750757217,
0.09803042560815811,
-0.01278767827898264,
-0.04026639088988304,
0.16976401209831238,
-0.1346268206834793,
-0.03339512646198273,
0.10970131307840347,
-0.03470807522535324,
0.06856591999530792,
0.10183117538690567,
0.16916699707508087,
0.16095788776874542,
-0.026127789169549942,
-0.040275320410728455,
-0.019997144117951393,
-0.0546194352209568,
0.11256148666143417,
0.06316012889146805,
-0.041642189025878906,
-0.03216277062892914,
0.01782972179353237,
-0.05037596821784973,
-0.0007999236695468426,
0.017534291371703148,
-0.020154757425189018,
0.019544826820492744,
-0.023958992213010788,
0.02114788442850113,
-0.04663184657692909,
0.04261910542845726,
0.02556067332625389,
-0.0466168150305748,
0.05842356011271477,
0.07694218307733536,
-0.05113140121102333,
0.02249336987733841,
-0.13249865174293518,
-0.013433934189379215,
-0.02609860524535179,
-0.00540904002264142,
-0.2048010379076004,
-0.06697121262550354,
0.07542399317026138,
-0.09617505222558975,
0.14702394604682922,
-0.009299022145569324,
-0.009836794808506966,
0.007228457368910313,
-0.04697705805301666,
-0.014889799058437347,
0.022507375106215477,
-0.04792686924338341,
-0.010095236822962761,
-0.10843441635370255,
-0.016913557425141335,
-0.022383473813533783,
0.1898827701807022,
-0.13706277310848236,
0.03575998172163963,
0.03972138464450836,
0.08318980783224106,
0.030010150745511055,
-0.026824796572327614,
0.09193991869688034,
0.01392784621566534,
0.025175191462039948,
-0.032719213515520096,
0.0009691946324892342,
0.018450865522027016,
-0.0811811089515686,
0.0805220678448677,
-0.12489966303110123,
-0.15458811819553375,
0.024468176066875458,
0.051203060895204544,
-0.10992242395877838,
0.058675315231084824,
-0.005538146011531353,
-0.05390216410160065,
-0.11359623819589615,
0.01359496358782053,
0.14848032593727112,
0.03232443705201149,
0.09914422780275345,
-0.04912690818309784,
-0.003051148261874914,
-0.02967008762061596,
-0.019277388229966164,
0.0044881729409098625,
0.08976171910762787,
0.0264759324491024,
-0.2951207756996155,
0.06187151372432709,
-0.05237848684191704,
0.009998462162911892,
0.12445637583732605,
0.023724116384983063,
-0.031466249376535416,
-0.02860584482550621,
0.08053351938724518,
0.0043428558856248856,
-0.012505139224231243,
-0.0723479688167572,
-0.005468070041388273,
0.021756712347269058,
0.01680797152221203,
0.005880939774215221,
-0.05430583655834198,
0.051342952996492386,
-0.017294785007834435,
-0.023479443043470383,
-0.029319392517209053,
0.02836267650127411,
0.05816097557544708,
0.0699920654296875,
0.05572493001818657,
0.10830084979534149,
-0.0187241118401289,
-0.020732644945383072,
-0.14219601452350616,
0.1363212764263153,
-0.09937839955091476,
-0.3061993420124054,
-0.15001794695854187,
0.020978527143597603,
-0.060124821960926056,
-0.013043595477938652,
-0.025446703657507896,
-0.044744592159986496,
-0.06759735196828842,
-0.051687516272068024,
0.12139645218849182,
-0.049576543271541595,
-0.07206664234399796,
-0.03967834264039993,
0.008632331155240536,
0.018449606373906136,
-0.06913471966981888,
0.016177061945199966,
0.012096862308681011,
-0.11313194781541824,
0.019440099596977234,
-0.048620495945215225,
0.016129281371831894,
0.13176628947257996,
-0.02239975705742836,
-0.021390048786997795,
-0.029541006311774254,
0.14839325845241547,
-0.0883542150259018,
0.0997188612818718,
0.0732111781835556,
-0.05085436999797821,
0.05860529839992523,
0.13534142076969147,
0.016756553202867508,
-0.02270152047276497,
0.06650897115468979,
0.09425491094589233,
-0.026429302990436554,
-0.29232361912727356,
-0.07035943120718002,
-0.03667962923645973,
0.04101995378732681,
0.06665276736021042,
0.0614679791033268,
0.0919826552271843,
0.04200366884469986,
-0.0661754310131073,
0.07457751780748367,
0.07848396897315979,
0.07107194513082504,
0.0865941047668457,
0.04053793475031853,
0.05912310257554054,
-0.1304839849472046,
-0.024150587618350983,
0.0750289261341095,
0.03373662754893303,
0.1681932657957077,
0.03487161546945572,
0.13005970418453217,
0.08061815798282623,
0.058103691786527634,
0.08407805114984512,
0.02801807038486004,
-0.0374176986515522,
0.03025449812412262,
-0.00736400717869401,
-0.036684490740299225,
-0.008894656784832478,
0.029246976599097252,
0.09991685301065445,
-0.04945538565516472,
0.052018072456121445,
-0.02638574130833149,
0.05190490931272507,
0.22672493755817413,
0.016034431755542755,
-0.06726359575986862,
-0.06074146181344986,
0.05402800440788269,
-0.055996306240558624,
-0.080845408141613,
-0.010223866440355778,
0.03695856034755707,
-0.17290447652339935,
0.13911016285419464,
-0.02251030132174492,
0.10673554986715317,
-0.1493178904056549,
-0.04945507273077965,
-0.04107852280139923,
-0.006610389798879623,
-0.007624363526701927,
0.061001334339380264,
-0.11812175810337067,
0.10701631754636765,
0.03326275944709778,
0.025855977088212967,
-0.042936790734529495,
0.02516716532409191,
0.012659001164138317,
0.06527649611234665,
0.11431810259819031,
-0.003599901683628559,
-0.029786579310894012,
-0.0605970099568367,
-0.057957932353019714,
-0.02008906565606594,
0.07830721139907837,
-0.09857559204101562,
0.0547725111246109,
-0.01275255624204874,
-0.011266084387898445,
-0.052782997488975525,
-0.05555138364434242,
-0.1648770272731781,
-0.13225364685058594,
0.027546489611268044,
-0.05696726217865944,
-0.035330742597579956,
-0.002666651038452983,
-0.010836424306035042,
-0.047679584473371506,
0.17912954092025757,
-0.10814177244901657,
-0.06581277400255203,
-0.10531970858573914,
-0.00836978666484356,
0.07802923023700714,
-0.08394301682710648,
0.03433581814169884,
-0.07569108158349991,
0.07979436218738556,
-0.030913295224308968,
-0.06187016889452934,
0.03837857022881508,
-0.01126114185899496,
-0.09946518391370773,
0.01435021124780178,
0.08822428435087204,
0.12029194086790085,
0.03219829872250557,
0.00012247958511579782,
0.018916411325335503,
0.06851419061422348,
-0.11371359974145889,
-0.04861271753907204,
0.11166936904191971,
-0.040927715599536896,
0.12742066383361816,
-0.06428389251232147,
-0.22374436259269714,
-0.10960029065608978,
-0.026720277965068817,
0.08179239928722382,
0.12565791606903076,
-0.045966800302267075,
0.12179321050643921,
0.15258239209651947,
-0.07648511230945587,
-0.18104077875614166,
0.0026486096903681755,
0.024247342720627785,
0.02966397814452648,
0.033573318272829056,
-0.19214962422847748,
0.1084684431552887,
0.04005112871527672,
-0.020160377025604248,
-0.06577129662036896,
-0.21962064504623413,
-0.12564052641391754,
0.046163588762283325,
0.021853577345609665,
-0.09007427096366882,
-0.11863435059785843,
-0.08322323858737946,
-0.026134006679058075,
-0.050519511103630066,
0.14434239268302917,
-0.019046280533075333,
0.06376288086175919,
0.02631845884025097,
0.02319403551518917,
0.009689039550721645,
-0.0168757326900959,
0.09984379261732101,
0.033769816160202026,
0.024185622110962868,
-0.044551119208335876,
-0.03621751815080643,
0.16747258603572845,
-0.02378014661371708,
0.14721165597438812,
0.06882230937480927,
0.022853385657072067,
-0.1252509504556656,
-0.06066746264696121,
-0.07056934386491776,
0.01988268829882145,
-0.041419629007577896,
-0.013738147914409637,
-0.1056574359536171,
0.0958729237318039,
0.02494218572974205,
-0.002922388259321451,
0.10878535360097885,
-0.06599204242229462,
0.06231136992573738,
0.03317902237176895,
0.11841918528079987,
0.16049320995807648,
-0.07388700544834137,
-0.06306740641593933,
-0.02336294576525688,
0.0385911799967289,
-0.16857793927192688,
0.061068400740623474,
0.06792277842760086,
0.04138602316379547,
0.1546405404806137,
-0.0169718936085701,
-0.14247801899909973,
0.03110538050532341,
0.07583173364400864,
-0.10008443146944046,
-0.16201123595237732,
0.032954007387161255,
0.04715748131275177,
-0.04827447980642319,
0.021605776622891426,
0.11115852743387222,
-0.023279793560504913,
-0.05518170818686485,
0.050669051706790924,
0.03168318048119545,
-0.04173702746629715,
0.06609906256198883,
0.008995047770440578,
0.04813344404101372,
-0.06812627613544464,
0.1575130969285965,
0.21163074672222137,
-0.07104157656431198,
-0.027660958468914032,
0.15984681248664856,
-0.13465216755867004,
-0.04175029695034027,
-0.10085480660200119,
0.1033528670668602,
0.014843221753835678,
-0.0442548468708992,
0.026583263650536537,
-0.08779893070459366,
0.020698601379990578,
0.17018859088420868,
0.010477129369974136,
0.05839983746409416,
-0.0509553961455822,
-0.007950189523398876,
-0.02786201797425747,
0.029555635526776314,
0.03467381373047829,
0.013706857338547707,
-0.09210868924856186,
0.02938441000878811,
0.10861214995384216,
0.047883134335279465,
-0.012151244096457958,
-0.0827011913061142,
-0.09900178015232086,
0.00900203362107277,
-0.014333846047520638,
0.06231727823615074,
-0.1005590409040451,
0.006759109906852245,
-0.0172409787774086,
-0.030749794095754623,
-0.016199199482798576,
-0.014890952035784721,
-0.03317820653319359,
-0.03657692298293114,
-0.05596272274851799,
0.11662230640649796,
-0.1495581418275833,
-0.013592967763543129,
0.07584821432828903,
-0.07378172129392624,
0.08585652709007263,
0.020424174144864082,
-0.03098677471280098,
0.03467978909611702,
-0.07960852235555649,
0.01173893641680479,
-0.0034964645747095346,
0.009357591159641743,
-0.01795969530940056,
-0.137202650308609,
-0.005240103229880333,
-0.04282256215810776,
0.025110164657235146,
0.026341477409005165,
0.038279302418231964,
-0.08028187602758408,
0.047319937497377396,
-0.0609143041074276,
-0.07308785617351532,
-0.069380022585392,
0.046146031469106674,
0.0823395624756813,
0.028393246233463287,
0.030569372698664665,
-0.05579857528209686,
0.04203042760491371,
-0.10319875925779343,
-0.023208174854516983,
-0.005364404525607824,
-0.000676233961712569,
-0.05009107291698456,
-0.05357961729168892,
0.02510547637939453,
0.0034558954648673534,
0.0795479416847229,
-0.040869634598493576,
0.012265280820429325,
0.04863648861646652,
0.06384024024009705,
-0.14192526042461395,
0.03284560143947601,
0.04044349864125252,
0.005025007296353579,
-0.016866439953446388,
0.09723979234695435,
-0.039507944136857986,
-0.09302858263254166,
0.046763259917497635,
0.1228809505701065,
0.1404767781496048,
-0.017261259257793427,
0.012582712806761265,
0.04667821153998375,
-0.012616786174476147,
-0.09576187282800674,
0.06395361572504044,
-0.12230582535266876,
-0.013421349227428436,
-0.07222160696983337,
0.05242703855037689,
0.14995405077934265,
-0.10396242886781693,
0.09196167439222336,
0.02734057419002056,
-0.08948680013418198,
-0.08184154331684113,
-0.20289622247219086,
-0.06964754313230515,
0.009668850339949131,
-0.010342473164200783,
-0.07581427693367004,
0.028743533417582512,
0.12974148988723755,
0.06047363579273224,
-0.01550603099167347,
0.17480704188346863,
-0.11752130091190338,
-0.09440728276968002,
0.06820350885391235,
0.02544671855866909,
0.0056845564395189285,
0.018867628648877144,
0.058030519634485245,
0.008963095024228096,
0.11926981806755066,
0.06724041700363159,
0.04687301814556122,
0.027885382995009422,
0.045684222131967545,
-0.051063407212495804,
-0.06981869786977768,
0.014330629259347916,
-0.0007605188293382525,
-0.011137357912957668,
0.15586881339550018,
0.04013792425394058,
-0.019846616312861443,
-0.030388185754418373,
0.18540796637535095,
-0.02001660130918026,
-0.0456027090549469,
-0.1761157065629959,
0.1708841174840927,
-0.01243086438626051,
-0.02360048145055771,
0.043297283351421356,
-0.10261636972427368,
-0.02446824125945568,
0.18209359049797058,
0.14097948372364044,
0.040031660348176956,
0.020420411601662636,
-0.0046338727697730064,
0.012216166593134403,
0.026272451505064964,
0.09495147317647934,
-0.033704858273267746,
0.173682302236557,
-0.04859958216547966,
0.07740134745836258,
-0.05818112939596176,
-0.04694833979010582,
-0.028593821451067924,
0.03337257727980614,
-0.020904429256916046,
0.03577060624957085,
-0.06570956110954285,
0.13812541961669922,
-0.13034148514270782,
-0.24656370282173157,
0.036096323281526566,
-0.06280043721199036,
-0.10539911687374115,
-0.019410237669944763,
-0.018990179523825645,
0.00706369336694479,
0.024833479896187782,
0.02981485426425934,
0.010980028659105301,
0.07932182401418686,
0.05245302990078926,
-0.05982578173279762,
-0.07996129989624023,
0.09229756891727448,
0.01755979284644127,
0.14070776104927063,
0.018892396241426468,
0.03751908987760544,
0.07668779790401459,
0.0071560777723789215,
-0.07390537858009338,
0.03221936896443367,
0.013166714459657669,
0.013053207658231258,
-0.02793874219059944,
0.16499371826648712,
0.0126341562718153,
0.12793543934822083,
0.06111680343747139,
-0.11101451516151428,
0.058383096009492874,
-0.012942778877913952,
0.04611439257860184,
-0.1498054414987564,
0.11145276576280594,
-0.11978226155042648,
0.1322534680366516,
0.16746342182159424,
-0.00912230834364891,
-0.0064118099398911,
-0.019127173349261284,
0.0007893122383393347,
-0.021853212267160416,
0.04507720097899437,
-0.03987609222531319,
-0.1730404794216156,
0.04842859134078026,
-0.1281733363866806,
0.09585436433553696,
-0.22880436480045319,
-0.020488914102315903,
0.012532106600701809,
0.010535008274018764,
-0.055320288985967636,
0.13038495182991028,
-0.04480671137571335,
0.026371633633971214,
-0.011112303473055363,
-0.2921421229839325,
0.019107624888420105,
0.08550454676151276,
-0.1032644584774971,
-0.045391716063022614
] |
null | null |
transformers
|
# WangchanBERTa base model: `wangchanberta-base-wiki-syllable`
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692).
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reivews`
Users' review rating classification task (scale is ranging from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
Thai topic classification with 12 labels based on news article corpus from prachathai.com. The detail is described in this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
  Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
  Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko)
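For a quick check outside the notebook, the checkpoint can also be loaded through the `transformers` fill-mask pipeline. The sketch below is illustrative only and not taken from the notebook; the syllable-level variant may require the custom tokenizer classes from the thai2transformers repository rather than the default tokenizer loading shown here, and the example sentence is made up.
```
from transformers import pipeline

# Minimal sketch: query the masked-LM head through the fill-mask pipeline.
# The wiki-syllable variant may need the custom tokenizer from thai2transformers.
fill_mask = pipeline(
    "fill-mask",
    model="airesearch/wangchanberta-base-wiki-syllable",
)

# <mask> follows the RoBERTa convention used by this model.
print(fill_mask("ผู้ใช้งานท่านนี้<mask>อาหารไทย"))
```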
<br>
## Training data
The `wangchanberta-base-wiki-syllable` model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (dumps.wikimedia.org/thwiki/20200820/). We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
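A rough re-implementation of these rules might look like the sketch below. This is not the project's preprocessing script, and the exact pattern used for the empty-parenthesis rule is an assumption.
```
import re

def preprocess(text):
    # Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
    text = re.sub(r"[\u00a0\ufeff\u00ad]", " ", text)
    # Remove an empty parenthesis right after the first paragraph's title (assumed pattern).
    text = re.sub(r"\(\s*\)", "", text, count=1)
    # Replace spaces with the <_> placeholder expected by the tokenizer.
    return text.replace(" ", "<_>")
```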
<br>
Regarding the vocabulary, we use a Thai syllable-level dictionary-based tokenizer denoted as `syllable` from PyThaiNLP [Phatthiyaphaibun et al., 2016]. The total number of word-level tokens in the vocabulary is 59,235.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split the sentence and add an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](https://arxiv.org/abs/1907.11692) (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with a `<mask>` token. Out of that 15%, 80% are replaced with a `<mask>` token, 10% are left unchanged, and 10% are replaced with a random token.
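This is the standard RoBERTa masking scheme; a hedged PyTorch sketch (an illustration, not the actual pretraining code) is:
```
import torch

def mask_tokens(input_ids, mask_token_id, vocab_size, special_ids, p=0.15):
    # Illustrative 80/10/10 masking; labels are -100 everywhere except masked positions.
    labels = input_ids.clone()
    prob = torch.full(input_ids.shape, p)
    for sid in special_ids:
        prob[input_ids == sid] = 0.0
    masked = torch.bernoulli(prob).bool()
    labels[~masked] = -100
    replaced = torch.bernoulli(torch.full(input_ids.shape, 0.8)).bool() & masked
    input_ids[replaced] = mask_token_id                                   # 80%: <mask>
    randomized = torch.bernoulli(torch.full(input_ids.shape, 0.5)).bool() & masked & ~replaced
    input_ids[randomized] = torch.randint(vocab_size, input_ids.shape)[randomized]  # 10%: random
    return input_ids, labels                                              # remaining 10%: unchanged
```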
<br>
**Train/Val/Test splits**
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
**Pretraining**
The model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2= 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint.
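Expressed with standard PyTorch and `transformers` utilities, the stated optimizer and schedule correspond roughly to the sketch below. This is only an illustration of the hyperparameters above, not the actual pretraining code.
```
import torch
from transformers import RobertaConfig, RobertaForMaskedLM, get_linear_schedule_with_warmup

# RoBERTa BASE with the 59,235-token vocabulary described above (other settings left at defaults).
model = RobertaForMaskedLM(RobertaConfig(vocab_size=59235))

optimizer = torch.optim.Adam(model.parameters(), lr=7e-4, betas=(0.9, 0.98), eps=1e-6)
scheduler = get_linear_schedule_with_warmup(
    optimizer, num_warmup_steps=1250, num_training_steps=31250
)
```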
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{"language": "th"}
|
fill-mask
|
airesearch/wangchanberta-base-wiki-syllable
|
[
"transformers",
"pytorch",
"jax",
"roberta",
"fill-mask",
"th",
"arxiv:1907.11692",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1907.11692",
"2101.09635"
] |
[
"th"
] |
TAGS
#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# WangchanBERTa base model: 'wangchanberta-base-wiki-syllable'
<br>
Pretrained RoBERTa BASE model on Thai Wikipedia corpus.
The script and documentation can be found at this repository.
<br>
## Model description
<br>
The architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL
<br>
## Intended uses & limitations
<br>
You can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reivews'
Users' review rating classification task (scale is ranging from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
  Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
  Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The getting started notebook of WangchanBERTa model can be found at this Colab notebook
<br>
## Training data
The 'wangchanberta-base-wiki-syllable' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL). We exclude lists and tables.
### Preprocessing
Texts are preprocessed with the following rules:
- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.
- Remove an empty parenthesis that occurs right after the title of the first paragraph.
- Replace spaces with <_>.
<br>
Regarding the vocabulary, we use a Thai syllable-level dictionary-based tokenizer denoted as 'syllable' from PyThaiNLP [Phatthiyaphaibun et al., 2016]. The total number of word-level tokens in the vocabulary is 59,235.
We sample sentences contiguously so that each sequence has a length of at most 512 tokens. For sentences that overlap the 512-token boundary, we split the sentence and add an additional token as a document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called "FULL-SENTENCES").
Regarding the masking procedure, for each sequence we sample 15% of the tokens and replace them with a <mask> token. Out of that 15%, 80% are replaced with a <mask> token, 10% are left unchanged, and 10% are replaced with a random token.
<br>
Train/Val/Test splits
We split the data sequentially: 944,782 sentences for the training set, 24,863 sentences for the validation set, and 24,862 sentences for the test set.
<br>
Pretraining
The model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\beta_1 = 0.9$, $\beta_2= 0.98$ and $\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint.
<br>
BibTeX entry and citation info
|
[
"# WangchanBERTa base model: 'wangchanberta-base-wiki-syllable'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-syllable' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use a Thai syllable-level dictionary-based tokenizer denoted as 'syllable' from PyThaiNLP [Phatthiyaphaibun et al., 2016]. The total number of word-level tokens in the vocabulary is 59,235.\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# WangchanBERTa base model: 'wangchanberta-base-wiki-syllable'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>",
"## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>",
"## Training data\n\n'wangchanberta-base-wiki-syllable' model was pretrained on Thai Wikipedia. Specifically, we use the Wikipedia dump articles on 20 August 2020 (URL We opt out lists, and tables.",
"### Preprocessing\n\nTexts are preprocessed with the following rules:\n\n- Replace non-breaking space, zero-width non-breaking space, and soft hyphen with spaces.\n- Remove an empty parenthesis that occur right after the title of the first paragraph.\n- Replace spaces wtth <_>.\n\n<br>\n\n\nRegarding the vocabulary, we use a Thai syllable-level dictionary-based tokenizer denoted as 'syllable' from PyThaiNLP [Phatthiyaphaibun et al., 2016]. The total number of word-level tokens in the vocabulary is 59,235.\n\nWe sample sentences contigously to have the length of at most 512 tokens. For some sentences that overlap the boundary of 512 tokens, we split such sentence with an additional token as document separator. This is the same approach as proposed by [[Liu et al., 2019]](URL (called \"FULL-SENTENCES\"). \n\nRegarding the masking procedure, for each sequence, we sampled 15% of the tokens and replace them with<mask>token.Out of the 15%, 80% is replaced with a<mask>token, 10% is left unchanged and 10% is replaced with a random token.\n\n<br>\n\nTrain/Val/Test splits\n\nWe split sequencially 944,782 sentences for training set, 24,863 sentences for validation set and 24,862 sentences for test set.\n\n<br>\n\nPretraining\n\nThe model was trained on 32 V100 GPUs for 31,250 steps with the batch size of 8,192 (16 sequences per device with 16 accumulation steps) and a sequence length of 512 tokens. The optimizer we used is Adam with the learning rate of $7e-4$, $\\beta_1 = 0.9$, $\\beta_2= 0.98$ and $\\epsilon = 1e-6$. The learning rate is warmed up for the first 1250 steps and linearly decayed to zero. The model checkpoint with minimum validation loss will be selected as the best model checkpoint. \n\n<br>\n\nBibTeX entry and citation info"
] |
[
59,
56,
35,
312,
28,
52,
484
] |
[
"passage: TAGS\n#transformers #pytorch #jax #roberta #fill-mask #th #arxiv-1907.11692 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# WangchanBERTa base model: 'wangchanberta-base-wiki-syllable'\n\n<br>\n\nPretrained RoBERTa BASE model on Thai Wikipedia corpus.\nThe script and documentation can be found at this reposiryory.\n<br>## Model description\n\n<br>\n\nThe architecture of the pretrained model is based on RoBERTa [[Liu et al., 2019]](URL \n\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the pretrained model for masked language modeling (i.e. predicting a mask token in the input text). In addition, we also provide finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\n\n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>## How to use\n\n<br>\n\nThe getting started notebook of WangchanBERTa model can be found at this Colab notebook\n\n<br>"
] |
[
0.004911165684461594,
0.05648637190461159,
-0.00574222719296813,
0.02399691939353943,
0.11357785016298294,
0.01254711952060461,
0.14050765335559845,
0.06106750667095184,
0.021561026573181152,
0.11240572482347488,
-0.061169445514678955,
0.011786097660660744,
0.09824775904417038,
0.12134397029876709,
0.05211124196648598,
-0.2122211754322052,
0.05414295941591263,
-0.04786067456007004,
0.02569042704999447,
0.08319767564535141,
0.08661982417106628,
-0.08592675626277924,
0.08916192501783371,
0.0642704963684082,
-0.06581936776638031,
0.02337922714650631,
-0.017412932589650154,
-0.07935967296361923,
0.052710045129060745,
0.03043813444674015,
0.1348719447851181,
0.011177008971571922,
0.03747370466589928,
-0.14796027541160583,
0.01213898230344057,
0.05675816535949707,
0.019013885408639908,
0.03262602165341377,
0.1286201924085617,
-0.07214400172233582,
0.11935372650623322,
-0.0791526809334755,
0.11432664841413498,
0.060208626091480255,
-0.09614882618188858,
-0.042315851897001266,
-0.1122889295220375,
0.15390464663505554,
0.11223588138818741,
0.04593653604388237,
-0.05095505341887474,
0.09238165616989136,
-0.08312352001667023,
0.03607187792658806,
0.02050650306046009,
-0.24641016125679016,
-0.034071147441864014,
0.09133679419755936,
-0.0073853894136846066,
0.029814589768648148,
-0.0979054868221283,
-0.0010504897218197584,
-0.004731255583465099,
0.01743769645690918,
0.02017798088490963,
-0.05157598853111267,
-0.02575194463133812,
-0.06782332807779312,
-0.09996144473552704,
-0.000516996078658849,
0.11269842833280563,
0.02692212164402008,
-0.08010087162256241,
-0.14373378455638885,
-0.036417156457901,
0.025120122358202934,
0.001981626031920314,
-0.0223021749407053,
-0.017406506463885307,
0.015780171379446983,
0.07483726739883423,
-0.07135336101055145,
-0.09172188490629196,
-0.005844182334840298,
-0.0347275473177433,
0.03637555614113808,
0.029363766312599182,
0.0006177998729981482,
-0.028444156050682068,
0.08700206875801086,
-0.005217037629336119,
-0.03662455081939697,
-0.0045537278056144714,
-0.04482248052954674,
-0.19117771089076996,
-0.040633611381053925,
0.03829300403594971,
-0.11831347644329071,
-0.00895108375698328,
0.15995918214321136,
-0.036408014595508575,
0.11347000300884247,
-0.09028851240873337,
0.0033869126345962286,
0.07059554010629654,
0.18069179356098175,
-0.07141204178333282,
-0.020077582448720932,
0.018435433506965637,
-0.03733766824007034,
0.004824663978070021,
-0.013730926439166069,
-0.0037847382482141256,
-0.04304596781730652,
0.06927403062582016,
0.03635551407933235,
0.0021734575275331736,
0.09216383844614029,
-0.056718409061431885,
-0.06894388794898987,
0.2090497463941574,
-0.12399190664291382,
-0.0072760917246341705,
0.039549876004457474,
-0.03906058892607689,
0.0395619198679924,
0.05409327521920204,
0.00458524189889431,
-0.06774554401636124,
0.09484126418828964,
-0.08586563915014267,
-0.010508831590414047,
-0.028725968673825264,
-0.08746727555990219,
0.014149880968034267,
-0.09971689432859421,
-0.1054498702287674,
-0.05437925457954407,
-0.10784308612346649,
-0.06207776814699173,
0.003754439763724804,
-0.03506802022457123,
-0.016864636912941933,
-0.0013429190730676055,
0.033076025545597076,
-0.0062063755467534065,
0.03473576158285141,
-0.0545240119099617,
0.005816331598907709,
0.012265844270586967,
-0.038744110614061356,
0.061910271644592285,
0.030896734446287155,
0.03954344987869263,
-0.10618887841701508,
0.059588972479104996,
-0.3516054153442383,
0.14329271018505096,
-0.08040334284305573,
0.05337291210889816,
-0.13419106602668762,
0.007872793823480606,
-0.0036895438097417355,
-0.014754531905055046,
-0.029567481949925423,
0.09662118554115295,
-0.014948898926377296,
-0.040764130651950836,
0.1679520159959793,
-0.13207432627677917,
-0.027373896911740303,
0.11195651441812515,
-0.035335373133420944,
0.06403353810310364,
0.10303300619125366,
0.1776421070098877,
0.16003470122814178,
-0.022910453379154205,
-0.04093458503484726,
-0.025081519037485123,
-0.05983363464474678,
0.11736372858285904,
0.06224241107702255,
-0.04626863822340965,
-0.024519307538866997,
0.017231302335858345,
-0.051068708300590515,
-0.0033223635982722044,
0.01747278682887554,
-0.019956907257437706,
0.022576753050088882,
-0.022907909005880356,
0.02766561135649681,
-0.04552212730050087,
0.04122938588261604,
0.02315383404493332,
-0.05117972195148468,
0.051960449665784836,
0.07583539932966232,
-0.047579266130924225,
0.021722832694649696,
-0.13312549889087677,
-0.010141953825950623,
-0.024289702996611595,
-0.007545182015746832,
-0.20281128585338593,
-0.0636705830693245,
0.07755102217197418,
-0.10582206398248672,
0.14997267723083496,
-0.016437577083706856,
-0.010614357888698578,
0.005156891420483589,
-0.04414963722229004,
-0.018801268190145493,
0.024119161069393158,
-0.045061253011226654,
-0.01134784147143364,
-0.10248202085494995,
-0.010148529894649982,
-0.022898120805621147,
0.18529842793941498,
-0.13664160668849945,
0.034376777708530426,
0.04994537681341171,
0.0890558660030365,
0.02873755805194378,
-0.025826022028923035,
0.09296751767396927,
0.009987957775592804,
0.024859154596924782,
-0.03459397703409195,
-0.0030985521152615547,
0.013244256377220154,
-0.08069255948066711,
0.0825616866350174,
-0.12726809084415436,
-0.14745768904685974,
0.023751569911837578,
0.053440701216459274,
-0.11009204387664795,
0.06093691661953926,
-0.0051427073776721954,
-0.051217854022979736,
-0.10897862911224365,
0.014049998484551907,
0.1530081033706665,
0.027197185903787613,
0.0981588363647461,
-0.04854544252157211,
-0.004373438190668821,
-0.02960566058754921,
-0.021777622401714325,
0.002000517211854458,
0.08872250467538834,
0.023618295788764954,
-0.30187729001045227,
0.06693608313798904,
-0.0479038842022419,
0.011786780320107937,
0.12418482452630997,
0.02638714388012886,
-0.02825971692800522,
-0.028310222551226616,
0.07309220731258392,
0.0016090507851913571,
-0.0181416142731905,
-0.06435564160346985,
-0.007846280001103878,
0.021012477576732635,
0.01728072576224804,
0.005056543275713921,
-0.05254208669066429,
0.05257612094283104,
-0.013710337691009045,
-0.022661931812763214,
-0.033925335854291916,
0.02702466957271099,
0.058133531361818314,
0.07074376940727234,
0.05643843859434128,
0.11123999208211899,
-0.01749645546078682,
-0.019588708877563477,
-0.13981306552886963,
0.13585756719112396,
-0.09954935312271118,
-0.3043248653411865,
-0.14830158650875092,
0.02035902626812458,
-0.057349350303411484,
-0.010638413019478321,
-0.025203686207532883,
-0.052480317652225494,
-0.07087308168411255,
-0.051307469606399536,
0.11884818226099014,
-0.0499732531607151,
-0.0700315609574318,
-0.044456835836172104,
0.007069634739309549,
0.0179799385368824,
-0.07105551660060883,
0.01635987125337124,
0.009670188650488853,
-0.11063960939645767,
0.019814424216747284,
-0.048942018300294876,
0.017814356833696365,
0.13599947094917297,
-0.028393739834427834,
-0.021312296390533447,
-0.02973678521811962,
0.1484699249267578,
-0.09253963083028793,
0.10106267780065536,
0.06628528982400894,
-0.05402945354580879,
0.0627969428896904,
0.13641035556793213,
0.014986038208007812,
-0.0228713471442461,
0.06815236806869507,
0.09480953961610794,
-0.02508852258324623,
-0.2914332449436188,
-0.06973826885223389,
-0.031230302527546883,
0.035083234310150146,
0.06743461638689041,
0.06244521960616112,
0.10382608324289322,
0.03853399306535721,
-0.06573034822940826,
0.06450382620096207,
0.0775994285941124,
0.07042224705219269,
0.08390069752931595,
0.044989317655563354,
0.05715028941631317,
-0.12933377921581268,
-0.024383630603551865,
0.07288207113742828,
0.030505750328302383,
0.16156625747680664,
0.03670043870806694,
0.12836940586566925,
0.0770597904920578,
0.05892058461904526,
0.08780262619256973,
0.023546185344457626,
-0.036287978291511536,
0.03395991772413254,
-0.00680882204324007,
-0.03439809009432793,
-0.005975794512778521,
0.030683608725667,
0.10799303650856018,
-0.05292665213346481,
0.05100465193390846,
-0.031550802290439606,
0.0550452284514904,
0.22256174683570862,
0.014598340727388859,
-0.07251235842704773,
-0.05727463215589523,
0.0545501708984375,
-0.054647840559482574,
-0.07789407670497894,
-0.01120259240269661,
0.029899869114160538,
-0.1693650633096695,
0.13787780702114105,
-0.01899644173681736,
0.10640273243188858,
-0.155457004904747,
-0.04834035038948059,
-0.0393943227827549,
-0.003306773491203785,
-0.008965350687503815,
0.06155823543667793,
-0.11326117813587189,
0.10455580055713654,
0.03275679051876068,
0.025993941351771355,
-0.041534118354320526,
0.02417219616472721,
0.012258222326636314,
0.06508451700210571,
0.11381983757019043,
-0.0030798614025115967,
-0.03363419324159622,
-0.06524693220853806,
-0.05792766809463501,
-0.023662488907575607,
0.08191028237342834,
-0.10410846024751663,
0.05185793340206146,
-0.011451225727796555,
-0.01285435538738966,
-0.05125097185373306,
-0.06289204955101013,
-0.16125018894672394,
-0.13089486956596375,
0.024108808487653732,
-0.059148456901311874,
-0.026935076341032982,
-0.00016127151320688426,
-0.01053495891392231,
-0.0505659393966198,
0.17438042163848877,
-0.12227772176265717,
-0.06777005642652512,
-0.10450977832078934,
-0.01161282230168581,
0.07569993287324905,
-0.08701016008853912,
0.03428594768047333,
-0.07793864607810974,
0.08136770129203796,
-0.027251245453953743,
-0.06218250095844269,
0.04276026785373688,
-0.0077263349667191505,
-0.10214263200759888,
0.011916073970496655,
0.08721569180488586,
0.12518930435180664,
0.02732488140463829,
0.0007522630621679127,
0.021485397592186928,
0.06867554038763046,
-0.11217549443244934,
-0.04357181861996651,
0.11304786056280136,
-0.044804662466049194,
0.1261388212442398,
-0.05237133428454399,
-0.229465052485466,
-0.11821793019771576,
-0.025846105068922043,
0.0771302804350853,
0.12001332640647888,
-0.04669154807925224,
0.12015295773744583,
0.15860559046268463,
-0.07733641564846039,
-0.17900831997394562,
0.0027995710261166096,
0.02312718704342842,
0.026665855199098587,
0.03330651670694351,
-0.1900811642408371,
0.10591814666986465,
0.04028206691145897,
-0.019334735348820686,
-0.06913618743419647,
-0.21186302602291107,
-0.12609899044036865,
0.04595869779586792,
0.01671122945845127,
-0.10148203372955322,
-0.11611419916152954,
-0.08488062024116516,
-0.02213534526526928,
-0.05085311830043793,
0.14949551224708557,
-0.011750219389796257,
0.060003072023391724,
0.028237367048859596,
0.025414548814296722,
0.008991544134914875,
-0.012354628182947636,
0.09848470985889435,
0.034362733364105225,
0.025905925780534744,
-0.04827248677611351,
-0.0419401191174984,
0.16496074199676514,
-0.022935375571250916,
0.14362409710884094,
0.07262590527534485,
0.023759454488754272,
-0.12668980658054352,
-0.061123836785554886,
-0.07023990899324417,
0.019641323015093803,
-0.040185634046792984,
-0.016277622431516647,
-0.10846985876560211,
0.09454057365655899,
0.0259840227663517,
-0.004254988860338926,
0.11032386869192123,
-0.06564389169216156,
0.05746345594525337,
0.03689194098114967,
0.11634981632232666,
0.16256052255630493,
-0.07118598371744156,
-0.06340029835700989,
-0.023505093529820442,
0.03993972763419151,
-0.17305037379264832,
0.062186796218156815,
0.06961338967084885,
0.04122232645750046,
0.15511952340602875,
-0.02055727317929268,
-0.14332328736782074,
0.031787123531103134,
0.07329589873552322,
-0.09663241356611252,
-0.1576368361711502,
0.034886036068201065,
0.04215003177523613,
-0.04128963500261307,
0.020801963284611702,
0.10454481095075607,
-0.022694209590554237,
-0.057004012167453766,
0.05054232105612755,
0.03354417905211449,
-0.04024391993880272,
0.06229112669825554,
0.013168935663998127,
0.047303665429353714,
-0.06547262519598007,
0.16186244785785675,
0.21754606068134308,
-0.07386451214551926,
-0.02598990872502327,
0.15701544284820557,
-0.13358674943447113,
-0.03818361461162567,
-0.10491542518138885,
0.10613325983285904,
0.01196364313364029,
-0.04664987325668335,
0.02785618230700493,
-0.08630315959453583,
0.017592914402484894,
0.1705751121044159,
0.010286816395819187,
0.057942405343055725,
-0.051303133368492126,
-0.006608904805034399,
-0.03003457561135292,
0.02855624072253704,
0.03772307559847832,
0.013588660396635532,
-0.09173472225666046,
0.026376377791166306,
0.11073113232851028,
0.04677680879831314,
-0.011907613836228848,
-0.08305089920759201,
-0.09762739390134811,
0.009420287795364857,
-0.008858716115355492,
0.060884974896907806,
-0.0974387601017952,
0.00840679556131363,
-0.017276037484407425,
-0.034658677875995636,
-0.014731801114976406,
-0.015233481302857399,
-0.030929498374462128,
-0.03603893145918846,
-0.05184479430317879,
0.11584145575761795,
-0.15324878692626953,
-0.016287101432681084,
0.07744304835796356,
-0.07077961415052414,
0.08644983917474747,
0.017024876549839973,
-0.0301507655531168,
0.03396467864513397,
-0.08528421074151993,
0.011150039732456207,
-0.0035620310809463263,
0.006517929490655661,
-0.017624972388148308,
-0.1367516964673996,
-0.006436407566070557,
-0.044753529131412506,
0.02167201228439808,
0.022817889228463173,
0.03859443590044975,
-0.0814913883805275,
0.04937665909528732,
-0.05857264623045921,
-0.07490178942680359,
-0.07138068974018097,
0.048125047236680984,
0.08149000257253647,
0.023557689040899277,
0.03132399916648865,
-0.05470024049282074,
0.04339323565363884,
-0.09994126111268997,
-0.022061295807361603,
-0.003062902018427849,
-0.0019076501484960318,
-0.050984736531972885,
-0.05051938071846962,
0.023519476875662804,
0.0003314970526844263,
0.08165030926465988,
-0.034603800624608994,
0.010818416252732277,
0.05256390944123268,
0.069252148270607,
-0.14743083715438843,
0.0340132862329483,
0.032813023775815964,
0.0009413569350726902,
-0.018999038264155388,
0.0935027152299881,
-0.043012600392103195,
-0.09399192780256271,
0.04031170904636383,
0.11748667061328888,
0.1403479129076004,
-0.014534025453031063,
0.008516684174537659,
0.04446012154221535,
-0.011596782132983208,
-0.10247613489627838,
0.06465122848749161,
-0.12403509020805359,
-0.01791466772556305,
-0.06966306269168854,
0.04503503814339638,
0.15164853632450104,
-0.09889440983533859,
0.09019672870635986,
0.02750523015856743,
-0.08962668478488922,
-0.07829413563013077,
-0.208229199051857,
-0.07146234810352325,
0.011882063001394272,
-0.010555963963270187,
-0.06972017139196396,
0.03216782212257385,
0.127794086933136,
0.05937028303742409,
-0.019764592871069908,
0.173728346824646,
-0.11238887161016464,
-0.09464380890130997,
0.07008541375398636,
0.025113889947533607,
0.0012797736562788486,
0.02133917436003685,
0.05912526696920395,
0.008028892800211906,
0.12383566796779633,
0.06823517382144928,
0.04671526327729225,
0.024934174492955208,
0.04447470232844353,
-0.05281287804245949,
-0.07030361145734787,
0.015045352280139923,
-0.0009225659887306392,
-0.017072511836886406,
0.1493287831544876,
0.041172049939632416,
-0.016660893335938454,
-0.028815802186727524,
0.18310153484344482,
-0.019001450389623642,
-0.03758345544338226,
-0.17412245273590088,
0.1771359145641327,
-0.009249069727957249,
-0.019524892792105675,
0.04801741987466812,
-0.10756667703390121,
-0.025309599936008453,
0.17718400061130524,
0.1375727653503418,
0.046530235558748245,
0.017962193116545677,
-0.01257641613483429,
0.013658675365149975,
0.02739603817462921,
0.09158328175544739,
-0.032669249922037125,
0.17010298371315002,
-0.046892326325178146,
0.08512263000011444,
-0.05865924432873726,
-0.044219810515642166,
-0.024678930640220642,
0.034812308847904205,
-0.019108207896351814,
0.03888539597392082,
-0.06995042413473129,
0.13892947137355804,
-0.13346749544143677,
-0.24514572322368622,
0.037869490683078766,
-0.06396038830280304,
-0.104268878698349,
-0.01618753746151924,
-0.018238358199596405,
0.009438875131309032,
0.023811159655451775,
0.03261351212859154,
0.007191740442067385,
0.07773086428642273,
0.054724618792533875,
-0.059747397899627686,
-0.07606083899736404,
0.08891365677118301,
0.00868232548236847,
0.14890579879283905,
0.019918521866202354,
0.03847235068678856,
0.07738147675991058,
0.004347688052803278,
-0.07377741485834122,
0.0352034755051136,
0.0121527798473835,
0.017123665660619736,
-0.030840687453746796,
0.1655111312866211,
0.016177359968423843,
0.12479428201913834,
0.06102827563881874,
-0.11168679594993591,
0.059005506336688995,
-0.017004286870360374,
0.04741036146879196,
-0.151535764336586,
0.11440854519605637,
-0.11720360815525055,
0.12835726141929626,
0.1628737896680832,
-0.007751045282930136,
-0.004420779645442963,
-0.019860392436385155,
-0.0025556592736393213,
-0.021986084058880806,
0.042642369866371155,
-0.03902694210410118,
-0.16640780866146088,
0.04846181347966194,
-0.12034634500741959,
0.09702561050653458,
-0.22214291989803314,
-0.015473154373466969,
0.00858550239354372,
0.00876049604266882,
-0.05101528391242027,
0.12895363569259644,
-0.0475815050303936,
0.027427593246102333,
-0.01232205331325531,
-0.28974100947380066,
0.01933775655925274,
0.08722259849309921,
-0.10471322387456894,
-0.04245841130614281
] |
null | null |
transformers
|
# `wav2vec2-large-xlsr-53-th`
Finetuning `wav2vec2-large-xlsr-53` on Thai [Common Voice 7.0](https://commonvoice.mozilla.org/en/datasets)
[Read more on our blog](https://medium.com/airesearch-in-th/airesearch-in-th-3c1019a99cd)
We finetune [wav2vec2-large-xlsr-53](https://huggingface.co/facebook/wav2vec2-large-xlsr-53) based on [Fine-tuning Wav2Vec2 for English ASR](https://colab.research.google.com/github/patrickvonplaten/notebooks/blob/master/Fine_tuning_Wav2Vec2_for_English_ASR.ipynb) using Thai examples of [Common Voice Corpus 7.0](https://commonvoice.mozilla.org/en/datasets). The notebooks and scripts can be found in [vistec-ai/wav2vec2-large-xlsr-53-th](https://github.com/vistec-ai/wav2vec2-large-xlsr-53-th). The pretrained model and processor can be found at [airesearch/wav2vec2-large-xlsr-53-th](https://huggingface.co/airesearch/wav2vec2-large-xlsr-53-th).
## `robust-speech-event`
Add `syllable_tokenize`, `word_tokenize` ([PyThaiNLP](https://github.com/PyThaiNLP/pythainlp)) and [deepcut](https://github.com/rkcosmos/deepcut) tokenizers to `eval.py` from [robust-speech-event](https://github.com/huggingface/transformers/tree/master/examples/research_projects/robust-speech-event#evaluation)
```
> python eval.py --model_id ./ --dataset mozilla-foundation/common_voice_7_0 --config th --split test --log_outputs --thai_tokenizer newmm/syllable/deepcut/cer
```
### Eval results on Common Voice 7 "test":
| | WER PyThaiNLP 2.3.1 | WER deepcut | SER | CER |
|---------------------------------|---------------------|-------------|---------|---------|
| Only Tokenization | 0.9524% | 2.5316% | 1.2346% | 0.1623% |
| Cleaning rules and Tokenization | TBD | TBD | TBD | TBD |
## Usage
```
import torch
import torchaudio
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC

#load pretrained processor and model
processor = Wav2Vec2Processor.from_pretrained("airesearch/wav2vec2-large-xlsr-53-th")
model = Wav2Vec2ForCTC.from_pretrained("airesearch/wav2vec2-large-xlsr-53-th")
#function to resample to 16_000
def speech_file_to_array_fn(batch,
text_col="sentence",
fname_col="path",
resampling_to=16000):
speech_array, sampling_rate = torchaudio.load(batch[fname_col])
resampler=torchaudio.transforms.Resample(sampling_rate, resampling_to)
batch["speech"] = resampler(speech_array)[0].numpy()
batch["sampling_rate"] = resampling_to
batch["target_text"] = batch[text_col]
return batch
#get 2 examples as sample input; first load the Thai Common Voice test split
#(mozilla-foundation/common_voice_7_0 is gated and may require use_auth_token=True)
from datasets import load_dataset
test_dataset = load_dataset("mozilla-foundation/common_voice_7_0", "th", split="test")
test_dataset = test_dataset.map(speech_file_to_array_fn)
inputs = processor(test_dataset["speech"][:2], sampling_rate=16_000, return_tensors="pt", padding=True)
#infer
with torch.no_grad():
logits = model(inputs.input_values,).logits
predicted_ids = torch.argmax(logits, dim=-1)
print("Prediction:", processor.batch_decode(predicted_ids))
print("Reference:", test_dataset["sentence"][:2])
>> Prediction: ['และ เขา ก็ สัมผัส ดีบุก', 'คุณ สามารถ รับทราบ เมื่อ ข้อความ นี้ ถูก อ่าน แล้ว']
>> Reference: ['และเขาก็สัมผัสดีบุก', 'คุณสามารถรับทราบเมื่อข้อความนี้ถูกอ่านแล้ว']
```
## Datasets
[Common Voice Corpus 7.0](https://commonvoice.mozilla.org/en/datasets) contains 133 validated hours of Thai (255 total hours) at 5GB. We pre-tokenize with `pythainlp.tokenize.word_tokenize`. We preprocess the dataset using the cleaning rules described in `notebooks/cv-preprocess.ipynb` by [@tann9949](https://github.com/tann9949). We then deduplicate and split as described in [ekapolc/Thai_commonvoice_split](https://github.com/ekapolc/Thai_commonvoice_split) in order to 1) avoid data leakage due to random splits after cleaning in [Common Voice Corpus 7.0](https://commonvoice.mozilla.org/en/datasets) and 2) preserve the majority of the data for the training set. The dataset loading script is `scripts/th_common_voice_70.py`. You can use this script together with `train_cleand.tsv`, `validation_cleaned.tsv` and `test_cleaned.tsv` to reproduce the same splits as we do (a hedged loading sketch is shown after the dataset summary below). The resulting dataset is as follows:
```
DatasetDict({
train: Dataset({
features: ['path', 'sentence'],
num_rows: 86586
})
test: Dataset({
features: ['path', 'sentence'],
num_rows: 2502
})
validation: Dataset({
features: ['path', 'sentence'],
num_rows: 3027
})
})
```
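A rough sketch of reproducing these splits with the loading script and the cleaned TSVs is given below. The argument names actually accepted by the script are defined in the repository, so treat this as an assumption rather than a verified invocation.
```
from datasets import load_dataset

# Illustrative only: paths are relative to a checkout of vistec-ai/wav2vec2-large-xlsr-53-th.
splits = load_dataset(
    "scripts/th_common_voice_70.py",
    data_files={
        "train": "train_cleand.tsv",
        "validation": "validation_cleaned.tsv",
        "test": "test_cleaned.tsv",
    },
)
print(splits)
```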
## Training
We finetuned using the following configuration on a single V100 GPU and chose the checkpoint with the lowest validation loss. The finetuning script is `scripts/wav2vec2_finetune.py`.
```
# create model
model = Wav2Vec2ForCTC.from_pretrained(
"facebook/wav2vec2-large-xlsr-53",
attention_dropout=0.1,
hidden_dropout=0.1,
feat_proj_dropout=0.0,
mask_time_prob=0.05,
layerdrop=0.1,
gradient_checkpointing=True,
ctc_loss_reduction="mean",
pad_token_id=processor.tokenizer.pad_token_id,
vocab_size=len(processor.tokenizer)
)
model.freeze_feature_extractor()
training_args = TrainingArguments(
output_dir="../data/wav2vec2-large-xlsr-53-thai",
group_by_length=True,
per_device_train_batch_size=32,
gradient_accumulation_steps=1,
per_device_eval_batch_size=16,
metric_for_best_model='wer',
evaluation_strategy="steps",
eval_steps=1000,
logging_strategy="steps",
logging_steps=1000,
save_strategy="steps",
save_steps=1000,
num_train_epochs=100,
fp16=True,
learning_rate=1e-4,
warmup_steps=1000,
save_total_limit=3,
report_to="tensorboard"
)
```
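The card stops at the training arguments; a hedged continuation, assuming a CTC padding data collator and prepared datasets as in the XLSR fine-tuning tutorial linked above, would look roughly like:
```
from transformers import Trainer

# data_collator, compute_metrics, train_dataset and eval_dataset are assumed to be
# defined as in the XLSR fine-tuning tutorial referenced earlier in this card.
trainer = Trainer(
    model=model,
    args=training_args,
    data_collator=data_collator,
    compute_metrics=compute_metrics,
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    tokenizer=processor.feature_extractor,
)
trainer.train()
```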
## Evaluation
We benchmark on the test set using WER with words tokenized by [PyThaiNLP](https://github.com/PyThaiNLP/pythainlp) 2.3.1 and [deepcut](https://github.com/rkcosmos/deepcut), as well as CER. We also measure performance when spell correction using [TNC](http://www.arts.chula.ac.th/ling/tnc/) ngrams is applied. Evaluation code can be found in `notebooks/wav2vec2_finetuning_tutorial.ipynb`, and a tokenized-WER sketch is given after the table below. The benchmark is performed on the `test-unique` split.
| | WER PyThaiNLP 2.3.1 | WER deepcut | CER |
|--------------------------------|---------------------|----------------|----------------|
| [Kaldi from scratch](https://github.com/vistec-AI/commonvoice-th) | 23.04 | | 7.57 |
| Ours without spell correction | 13.634024 | **8.152052** | **2.813019** |
| Ours with spell correction | 17.996397 | 14.167975 | 5.225761 |
| Google Web Speech API※ | 13.711234 | 10.860058 | 7.357340 |
| Microsoft Bing Speech API※ | **12.578819** | 9.620991 | 5.016620 |
| Amazon Transcribe※ | 21.86334 | 14.487553 | 7.077562 |
| NECTEC AI for Thai Partii API※ | 20.105887 | 15.515631 | 9.551027 |
※ APIs are not finetuned with Common Voice 7.0 data
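A minimal sketch of the tokenized WER used above (not the repository's evaluation notebook): because written Thai has no spaces, predictions and references are word-tokenized first and re-joined with spaces before the standard WER metric is computed.
```
from datasets import load_metric
from pythainlp.tokenize import word_tokenize

wer_metric = load_metric("wer")  # requires jiwer

def tokenized_wer(predictions, references):
    # Tokenize with PyThaiNLP and rejoin with spaces so the word-level WER is well defined.
    predictions = [" ".join(word_tokenize(p, keep_whitespace=False)) for p in predictions]
    references = [" ".join(word_tokenize(r, keep_whitespace=False)) for r in references]
    return wer_metric.compute(predictions=predictions, references=references)
```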
## LICENSE
[cc-by-sa 4.0](https://github.com/vistec-AI/wav2vec2-large-xlsr-53-th/blob/main/LICENSE)
## Acknowledgements
* model training and validation notebooks/scripts [@cstorm125](https://github.com/cstorm125/)
* dataset cleaning scripts [@tann9949](https://github.com/tann9949)
* dataset splits [@ekapolc](https://github.com/ekapolc/) and [@14mss](https://github.com/14mss)
* running the training [@mrpeerat](https://github.com/mrpeerat)
* spell correction [@wannaphong](https://github.com/wannaphong)
|
{"language": "th", "license": "cc-by-sa-4.0", "tags": ["audio", "automatic-speech-recognition", "hf-asr-leaderboard", "robust-speech-event", "speech", "xlsr-fine-tuning"], "datasets": ["common_voice"], "model-index": [{"name": "XLS-R-53 - Thai", "results": [{"task": {"type": "automatic-speech-recognition", "name": "Automatic Speech Recognition"}, "dataset": {"name": "Common Voice 7", "type": "mozilla-foundation/common_voice_7_0", "args": "th"}, "metrics": [{"type": "wer", "value": 0.9524, "name": "Test WER"}, {"type": "ser", "value": 1.2346, "name": "Test SER"}, {"type": "cer", "value": 0.1623, "name": "Test CER"}]}, {"task": {"type": "automatic-speech-recognition", "name": "Automatic Speech Recognition"}, "dataset": {"name": "Robust Speech Event - Dev Data", "type": "speech-recognition-community-v2/dev_data", "args": "sv"}, "metrics": [{"type": "wer", "name": "Test WER"}, {"type": "ser", "name": "Test SER"}, {"type": "cer", "name": "Test CER"}]}]}]}
|
automatic-speech-recognition
|
airesearch/wav2vec2-large-xlsr-53-th
|
[
"transformers",
"pytorch",
"wav2vec2",
"automatic-speech-recognition",
"audio",
"hf-asr-leaderboard",
"robust-speech-event",
"speech",
"xlsr-fine-tuning",
"th",
"dataset:common_voice",
"doi:10.57967/hf/0404",
"license:cc-by-sa-4.0",
"model-index",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"th"
] |
TAGS
#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #hf-asr-leaderboard #robust-speech-event #speech #xlsr-fine-tuning #th #dataset-common_voice #doi-10.57967/hf/0404 #license-cc-by-sa-4.0 #model-index #endpoints_compatible #has_space #region-us
|
'wav2vec2-large-xlsr-53-th'
===========================
Finetuning 'wav2vec2-large-xlsr-53' on Thai Common Voice 7.0
Read more on our blog
We finetune wav2vec2-large-xlsr-53 based on Fine-tuning Wav2Vec2 for English ASR using Thai examples of Common Voice Corpus 7.0. The notebooks and scripts can be found in vistec-ai/wav2vec2-large-xlsr-53-th. The pretrained model and processor can be found at airesearch/wav2vec2-large-xlsr-53-th.
'robust-speech-event'
---------------------
Add 'syllable\_tokenize', 'word\_tokenize' (PyThaiNLP) and deepcut tokenizers to 'URL' from robust-speech-event
### Eval results on Common Voice 7 "test":
Usage
-----
Datasets
--------
Common Voice Corpus 7.0 (URL) contains 133 validated hours of Thai (255 total hours) at 5GB. We pre-tokenize with 'pythainlp.tokenize.word\_tokenize'. We preprocess the dataset using cleaning rules described in 'notebooks/URL' by @tann9949. We then deduplicate and split as described in ekapolc/Thai\_commonvoice\_split in order to 1) avoid data leakage due to random splits after cleaning in Common Voice Corpus 7.0 and 2) preserve the majority of the data for the training set. The dataset loading script is 'scripts/th\_common\_voice\_70.py'. You can use these scripts together with 'train\_cleand.tsv', 'validation\_cleaned.tsv' and 'test\_cleaned.tsv' to have the same splits as we do. The resulting dataset is as follows:
Training
--------
We finetuned using the following configuration on a single V100 GPU and chose the checkpoint with the lowest validation loss. The finetuning script is 'scripts/wav2vec2\_finetune.py'
Evaluation
----------
We benchmark on the test set using WER (with words tokenized by PyThaiNLP 2.3.1 and deepcut) and CER. We also measure performance when spell correction using TNC ngrams is applied. The evaluation code can be found in 'notebooks/wav2vec2\_finetuning\_tutorial.ipynb'. The benchmark is performed on the 'test-unique' split.
※ APIs are not finetuned with Common Voice 7.0 data
LICENSE
-------
cc-by-sa 4.0
Acknowledgements
---------------
* model training and validation notebooks/scripts @cstorm125
* dataset cleaning scripts @tann9949
* dataset splits @ekapolc and @14mss
* running the training @mrpeerat
* spell correction @wannaphong
|
[
"### Eval results on Common Voice 7 \"test\":\n\n\n\nUsage\n-----\n\n\nDatasets\n--------\n\n\nCommon Voice Corpus 7.0](URL contains 133 validated hours of Thai (255 total hours) at 5GB. We pre-tokenize with 'pythainlp.tokenize.word\\_tokenize'. We preprocess the dataset using cleaning rules described in 'notebooks/URL' by @tann9949. We then deduplicate and split as described in ekapolc/Thai\\_commonvoice\\_split in order to 1) avoid data leakage due to random splits after cleaning in Common Voice Corpus 7.0 and 2) preserve the majority of the data for the training set. The dataset loading script is 'scripts/th\\_common\\_voice\\_70.py'. You can use this scripts together with 'train\\_cleand.tsv', 'validation\\_cleaned.tsv' and 'test\\_cleaned.tsv' to have the same splits as we do. The resulting dataset is as follows:\n\n\nTraining\n--------\n\n\nWe fintuned using the following configuration on a single V100 GPU and chose the checkpoint with the lowest validation loss. The finetuning script is 'scripts/wav2vec2\\_finetune.py'\n\n\nEvaluation\n----------\n\n\nWe benchmark on the test set using WER with words tokenized by PyThaiNLP 2.3.1 and deepcut, and CER. We also measure performance when spell correction using TNC ngrams is applied. Evaluation codes can be found in 'notebooks/wav2vec2\\_finetuning\\_tutorial.ipynb'. Benchmark is performed on 'test-unique' split.\n\n\n\n※ APIs are not finetuned with Common Voice 7.0 data\n\n\nLICENSE\n-------\n\n\ncc-by-sa 4.0\n\n\nAckowledgements\n---------------\n\n\n* model training and validation notebooks/scripts @cstorm125\n* dataset cleaning scripts @tann9949\n* dataset splits @ekapolc and @14mss\n* running the training @mrpeerat\n* spell correction @wannaphong"
] |
[
"TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #hf-asr-leaderboard #robust-speech-event #speech #xlsr-fine-tuning #th #dataset-common_voice #doi-10.57967/hf/0404 #license-cc-by-sa-4.0 #model-index #endpoints_compatible #has_space #region-us \n",
"### Eval results on Common Voice 7 \"test\":\n\n\n\nUsage\n-----\n\n\nDatasets\n--------\n\n\nCommon Voice Corpus 7.0](URL contains 133 validated hours of Thai (255 total hours) at 5GB. We pre-tokenize with 'pythainlp.tokenize.word\\_tokenize'. We preprocess the dataset using cleaning rules described in 'notebooks/URL' by @tann9949. We then deduplicate and split as described in ekapolc/Thai\\_commonvoice\\_split in order to 1) avoid data leakage due to random splits after cleaning in Common Voice Corpus 7.0 and 2) preserve the majority of the data for the training set. The dataset loading script is 'scripts/th\\_common\\_voice\\_70.py'. You can use this scripts together with 'train\\_cleand.tsv', 'validation\\_cleaned.tsv' and 'test\\_cleaned.tsv' to have the same splits as we do. The resulting dataset is as follows:\n\n\nTraining\n--------\n\n\nWe fintuned using the following configuration on a single V100 GPU and chose the checkpoint with the lowest validation loss. The finetuning script is 'scripts/wav2vec2\\_finetune.py'\n\n\nEvaluation\n----------\n\n\nWe benchmark on the test set using WER with words tokenized by PyThaiNLP 2.3.1 and deepcut, and CER. We also measure performance when spell correction using TNC ngrams is applied. Evaluation codes can be found in 'notebooks/wav2vec2\\_finetuning\\_tutorial.ipynb'. Benchmark is performed on 'test-unique' split.\n\n\n\n※ APIs are not finetuned with Common Voice 7.0 data\n\n\nLICENSE\n-------\n\n\ncc-by-sa 4.0\n\n\nAckowledgements\n---------------\n\n\n* model training and validation notebooks/scripts @cstorm125\n* dataset cleaning scripts @tann9949\n* dataset splits @ekapolc and @14mss\n* running the training @mrpeerat\n* spell correction @wannaphong"
] |
[
112,
489
] |
[
"passage: TAGS\n#transformers #pytorch #wav2vec2 #automatic-speech-recognition #audio #hf-asr-leaderboard #robust-speech-event #speech #xlsr-fine-tuning #th #dataset-common_voice #doi-10.57967/hf/0404 #license-cc-by-sa-4.0 #model-index #endpoints_compatible #has_space #region-us \n"
] |
[
-0.17779573798179626,
0.06430971622467041,
-0.003587376559153199,
-0.025053735822439194,
0.0689401775598526,
-0.06832180917263031,
0.0680026262998581,
0.10224537551403046,
0.05836803466081619,
0.06668006628751755,
0.03603559359908104,
0.09738650918006897,
0.03002149425446987,
0.07523287087678909,
-0.08006204664707184,
-0.19180215895175934,
0.08023010194301605,
0.02172050066292286,
0.08749089390039444,
0.07638002187013626,
0.10246714949607849,
-0.08612444996833801,
0.026337286457419395,
0.043206628412008286,
-0.0833020731806755,
0.035266779363155365,
0.07094532996416092,
-0.12154760211706161,
0.13500864803791046,
0.05919983983039856,
0.01750165782868862,
0.08421527594327927,
0.03563067689538002,
-0.15605053305625916,
0.03980003297328949,
-0.013653355650603771,
0.02001313865184784,
0.04790889844298363,
0.020667191594839096,
-0.020049136132001877,
0.005385423079133034,
0.05330720916390419,
-0.060256510972976685,
0.10109373927116394,
-0.06792663782835007,
-0.20774443447589874,
-0.06830208748579025,
0.08597629517316818,
0.005770323798060417,
0.07048168033361435,
-0.06272470951080322,
0.10851223766803741,
-0.09317196905612946,
0.08137359470129013,
0.11663714051246643,
-0.24059565365314484,
0.0304558202624321,
-0.013057379983365536,
0.06653863191604614,
0.04274795204401016,
-0.049342937767505646,
0.06654784083366394,
0.03882976248860359,
0.00542342197149992,
-0.03796400874853134,
-0.06602254509925842,
-0.17229336500167847,
0.0031708956230431795,
-0.10633958876132965,
0.002023223089054227,
0.27095744013786316,
0.052396003156900406,
0.017408300191164017,
-0.10398941487073898,
-0.020216569304466248,
-0.022774582728743553,
-0.03986073285341263,
-0.014702558517456055,
-0.004507929086685181,
0.007643860764801502,
-0.004919617436826229,
-0.004959354642778635,
-0.1194477379322052,
-0.0656597763299942,
-0.10233308374881744,
0.181971937417984,
-0.008380640298128128,
0.01226000115275383,
-0.11396027356386185,
0.011599818244576454,
-0.08535608649253845,
-0.075375497341156,
0.0007632112829014659,
0.002884641755372286,
-0.06696382910013199,
0.036411404609680176,
-0.0380890853703022,
-0.014620976522564888,
0.1237032562494278,
-0.024481404572725296,
-0.09381356835365295,
0.016653696075081825,
-0.051875460892915726,
0.10033437609672546,
0.011467766016721725,
0.11810054630041122,
-0.1133754700422287,
-0.014423619024455547,
0.0320357121527195,
0.03289457783102989,
0.06753209978342056,
-0.03557761386036873,
-0.1038348600268364,
-0.047352466732263565,
0.0037474429700523615,
0.04852207005023956,
0.019495896995067596,
0.040842149406671524,
-0.056572481989860535,
0.010695567354559898,
0.07341077923774719,
-0.10907287150621414,
-0.014543977566063404,
0.07201172411441803,
0.053294721990823746,
0.07504341751337051,
0.01466887816786766,
0.04532087966799736,
-0.07557422667741776,
0.01828860677778721,
-0.01362904068082571,
0.028935162350535393,
0.06929594278335571,
-0.057586975395679474,
0.04455956816673279,
-0.08550858497619629,
0.007545606233179569,
-0.11440898478031158,
0.008424290455877781,
-0.04381941631436348,
-0.0861879363656044,
0.027328236028552055,
-0.12454784661531448,
-0.04042914882302284,
-0.02965979091823101,
0.02458345703780651,
-0.1009766235947609,
-0.0034924589563161135,
-0.0787624791264534,
0.09758154302835464,
0.045320168137550354,
0.06807538866996765,
-0.11735884845256805,
0.09296845644712448,
-0.027049031108617783,
-0.006750029511749744,
-0.04649154841899872,
0.09854254871606827,
-0.07634618133306503,
-0.010031450539827347,
-0.06093090400099754,
-0.0555996336042881,
-0.11115767061710358,
0.09669063985347748,
-0.02668810449540615,
0.10955267399549484,
-0.17963986098766327,
-0.13687407970428467,
0.1440766602754593,
-0.07862958312034607,
-0.057903919368982315,
0.13111984729766846,
0.04308216646313667,
-0.03556579723954201,
0.10479433834552765,
0.3440326452255249,
-0.027004648000001907,
-0.17733079195022583,
-0.04118502885103226,
0.0649430975317955,
-0.0824727937579155,
-0.05465301126241684,
0.05163613334298134,
-0.09463085234165192,
0.03404884412884712,
0.005387766752392054,
0.006483500823378563,
0.04947414621710777,
-0.024086620658636093,
-0.061694566160440445,
-0.0021123704500496387,
-0.076756551861763,
0.023493124172091484,
-0.0029265983030200005,
-0.002413935260847211,
-0.03521409630775452,
-0.05396568402647972,
-0.005877362098544836,
0.10353262722492218,
-0.059286508709192276,
0.07756378501653671,
-0.14973485469818115,
0.13986331224441528,
-0.07113813608884811,
-0.0037654107436537743,
-0.16483697295188904,
0.18622402846813202,
-0.05666220933198929,
0.08628600835800171,
0.13336005806922913,
0.13932545483112335,
0.04252038151025772,
-0.06911443918943405,
-0.03155231848359108,
-0.04961830750107765,
0.10416118055582047,
0.04890269413590431,
-0.008955583907663822,
-0.1814170777797699,
0.041424546390771866,
-0.08063893020153046,
0.11959686875343323,
-0.08767890185117722,
-0.03611808642745018,
0.14991571009159088,
0.06896396726369858,
0.01867673173546791,
0.004648021887987852,
0.09358880668878555,
0.0387413427233696,
0.033074621111154556,
0.028062395751476288,
0.037901315838098526,
0.002334178891032934,
-0.10350313782691956,
0.24823351204395294,
-0.23747804760932922,
0.1956242173910141,
0.19665659964084625,
-0.11211297661066055,
0.04596709460020065,
0.10735375434160233,
-0.003772337920963764,
-0.014035115949809551,
0.04280411824584007,
-0.05254990980029106,
0.16755343973636627,
-0.019853930920362473,
0.13684117794036865,
-0.06694412231445312,
0.008635752834379673,
0.02417442388832569,
-0.01780199632048607,
-0.02487061358988285,
0.1087256595492363,
-0.013431341387331486,
-0.034919094294309616,
0.07523521780967712,
0.11007045209407806,
-0.04126386716961861,
0.21893928945064545,
-0.04531398043036461,
-0.0623994842171669,
0.04859369248151779,
-0.04164551571011543,
-0.0516175776720047,
0.14029450714588165,
-0.2070586383342743,
-0.05551599711179733,
0.04771043360233307,
-0.0031072681304067373,
0.0780639499425888,
-0.16438470780849457,
0.007826180197298527,
-0.020873192697763443,
-0.10085490345954895,
-0.15850339829921722,
0.11331581324338913,
0.0005701761110685766,
0.09081831574440002,
-0.06733734160661697,
-0.17663942277431488,
0.030676329508423805,
-0.04276839271187782,
-0.12306296080350876,
0.03798970207571983,
-0.09014835208654404,
-0.26735371351242065,
-0.09255316853523254,
-0.0638786107301712,
-0.0018210881389677525,
0.027069492265582085,
0.10394182056188583,
-0.12480289489030838,
-0.018453234806656837,
-0.019929228350520134,
0.05743282288312912,
-0.04990717023611069,
-0.011025095358490944,
0.006206177640706301,
-0.02764207124710083,
0.07020661979913712,
-0.13614754378795624,
-0.014043212868273258,
-0.07667258381843567,
0.023577464744448662,
0.03064008243381977,
0.03975920379161835,
0.08508679270744324,
0.22721268236637115,
0.09437732398509979,
0.035066913813352585,
-0.04854736477136612,
0.11079639941453934,
-0.14855828881263733,
-0.055911727249622345,
0.18174166977405548,
-0.049378346651792526,
-0.008225071243941784,
0.19337967038154602,
0.031435903161764145,
-0.00820861104875803,
-0.06752432882785797,
-0.01714891754090786,
-0.026009229943156242,
-0.18599046766757965,
-0.13624323904514313,
-0.11700065433979034,
-0.023644404485821724,
-0.07723724097013474,
0.05416446551680565,
0.04344567283987999,
-0.050044458359479904,
-0.02308678813278675,
-0.11888593435287476,
0.02035640925168991,
-0.02336866781115532,
0.24185563623905182,
-0.06977146118879318,
0.10902290046215057,
-0.07362283021211624,
-0.0704561248421669,
0.06271586567163467,
0.054340627044439316,
0.009257792495191097,
0.121986523270607,
0.10686531662940979,
0.038296911865472794,
0.11308639496564865,
0.1391906440258026,
0.023159442469477654,
0.031516753137111664,
-0.02196189947426319,
0.014319405891001225,
-0.07461673021316528,
0.0020237232092767954,
0.037459906190633774,
0.2644045352935791,
-0.06435661017894745,
-0.0347551554441452,
-0.09064260125160217,
0.034474167972803116,
0.15335015952587128,
0.11727112531661987,
-0.14943812787532806,
-0.019767669960856438,
0.024496860802173615,
-0.07136043161153793,
-0.002834377810359001,
0.08387750387191772,
0.10168085247278214,
-0.038211170583963394,
0.07481983304023743,
0.0652049332857132,
0.06026624143123627,
-0.05736348778009415,
0.08775106072425842,
-0.13598428666591644,
-0.002200681483373046,
0.010810516774654388,
0.02296234481036663,
-0.1844082772731781,
0.2541390061378479,
0.03880169987678528,
0.04620039835572243,
-0.0006730268942192197,
-0.01232418604195118,
0.06054415926337242,
0.10794951766729355,
0.15341249108314514,
-0.0005480929976329207,
-0.0350252166390419,
-0.11206921935081482,
-0.06277627497911453,
0.04722968488931656,
0.04321626201272011,
0.08127158135175705,
-0.04047572985291481,
0.004164363723248243,
-0.04358217120170593,
0.04201586917042732,
-0.059545811265707016,
-0.13866743445396423,
-0.07428005337715149,
0.06365647912025452,
0.270168662071228,
0.09245284646749496,
-0.005114121828228235,
-0.07690324634313583,
-0.23222850263118744,
0.042299915105104446,
-0.16830988228321075,
0.0010681033600121737,
-0.05690045654773712,
-0.13207033276557922,
0.11258266121149063,
-0.057145051658153534,
-0.01680975966155529,
0.02576751634478569,
0.03644055500626564,
-0.0638929009437561,
-0.06429741531610489,
0.13184604048728943,
-0.0770452618598938,
-0.06040690839290619,
-0.002280647400766611,
0.2677314877510071,
-0.031196491792798042,
0.08726461231708527,
0.014811904169619083,
0.02412262372672558,
-0.0370207279920578,
-0.01604127697646618,
0.1538066864013672,
0.06130208820104599,
-0.06986831873655319,
0.053642455488443375,
0.037707798182964325,
-0.18165171146392822,
-0.07484105229377747,
-0.010479598306119442,
0.2142985463142395,
0.16939431428909302,
-0.073160320520401,
0.18498583137989044,
0.21713027358055115,
-0.014522094279527664,
-0.27599722146987915,
-0.08027539402246475,
-0.05950945243239403,
0.054225411266088486,
-0.04395131766796112,
-0.1199369952082634,
0.06418225169181824,
-0.05412689596414566,
-0.07376839965581894,
0.05569671466946602,
-0.15862038731575012,
-0.11556821316480637,
0.2572135031223297,
-0.13706792891025543,
0.22162872552871704,
-0.06304170936346054,
-0.08304691314697266,
-0.0219431035220623,
-0.10507852584123611,
0.020079078152775764,
-0.13109292089939117,
0.10112041234970093,
0.03435668721795082,
0.08727847039699554,
0.023822607472538948,
-0.00864979438483715,
0.08825139701366425,
0.042442742735147476,
-0.032883044332265854,
-0.013180645182728767,
-0.07229410856962204,
-0.004875231999903917,
0.02194538153707981,
0.06200898811221123,
-0.09637433290481567,
-0.013691527768969536,
-0.0856698676943779,
-0.04056813567876816,
-0.09527096897363663,
0.09193595498800278,
0.056478410959243774,
0.003179783932864666,
0.005501061212271452,
-0.0622289702296257,
-0.040524326264858246,
0.020590662956237793,
0.15181156992912292,
-0.1916092038154602,
0.054718099534511566,
0.1241629347205162,
0.21200314164161682,
-0.15763121843338013,
-0.09547290205955505,
-0.036449868232011795,
-0.08471246063709259,
0.11851517856121063,
-0.04517929628491402,
0.06842029839754105,
0.05366326868534088,
0.04524519294500351,
0.10176954418420792,
0.03940998762845993,
-0.07025475800037384,
0.05225295573472977,
0.0817805603146553,
-0.11533106118440628,
-0.1417633593082428,
-0.024164840579032898,
-0.039184726774692535,
0.05394889414310455,
0.08675014227628708,
0.16970638930797577,
-0.008707722648978233,
-0.0013776894193142653,
-0.009283540770411491,
0.023602036759257317,
-0.12549848854541779,
0.15096023678779602,
0.10003113001585007,
0.05212732031941414,
-0.16922904551029205,
0.06641825288534164,
-0.015592282637953758,
-0.13029977679252625,
0.023020723834633827,
0.014571682550013065,
-0.040637657046318054,
-0.1024298146367073,
-0.17409585416316986,
0.01985572651028633,
0.013597534038126469,
-0.14020298421382904,
-0.0586506649851799,
-0.15681704878807068,
0.039942994713783264,
0.16553424298763275,
0.030863070860505104,
0.060626886785030365,
-0.07444599270820618,
-0.05217216908931732,
-0.023107580840587616,
0.011899071745574474,
0.006253272760659456,
0.012631749734282494,
-0.18040437996387482,
0.09129355102777481,
-0.005591376218944788,
0.11072377860546112,
-0.06636585295200348,
-0.071489118039608,
-0.05573597550392151,
0.0577404722571373,
-0.08577225357294083,
-0.014679783955216408,
-0.06258541345596313,
-0.0009708820725791156,
0.01535275299102068,
-0.07885976135730743,
-0.03333289548754692,
0.0463685467839241,
-0.11973867565393448,
0.04539802670478821,
0.0219409316778183,
0.08174815028905869,
-0.12146598845720291,
0.040827348828315735,
0.02109583467245102,
-0.019370878115296364,
0.15481843054294586,
0.14621981978416443,
-0.14191091060638428,
0.07869577407836914,
-0.21836668252944946,
-0.1923755407333374,
0.1320556402206421,
0.03557154908776283,
0.002789321355521679,
-0.025339201092720032,
-0.03456861898303032,
0.10358545184135437,
0.045161377638578415,
-0.001897722017019987,
0.0610361248254776,
-0.05094105377793312,
0.037819940596818924,
-0.11192414909601212,
-0.023348461836576462,
-0.033310145139694214,
-0.001027863472700119,
0.1386002153158188,
0.1147754117846489,
0.10330972820520401,
-0.0695529580116272,
-0.0001306810008827597,
-0.08214238286018372,
0.05813712626695633,
-0.05875670537352562,
-0.11271554231643677,
-0.13424433767795563,
-0.019069138914346695,
0.0933479592204094,
-0.05231771618127823,
0.17013539373874664,
-0.03849710524082184,
-0.08800558745861053,
0.023912329226732254,
-0.0028211819007992744,
-0.03537997603416443,
0.008041676133871078,
0.22466863691806793,
0.045102883130311966,
-0.016316348686814308,
-0.005105928052216768,
-0.025973143056035042,
0.044996023178100586,
0.1397925317287445,
0.015321857295930386,
0.15238390862941742,
0.12352795898914337,
0.08945602178573608,
0.1524476408958435,
-0.06785187125205994,
-0.031042862683534622,
0.035429541021585464,
-0.10781484097242355,
0.0449133962392807,
-0.0694093257188797,
0.12751930952072144,
0.15150992572307587,
0.01675787940621376,
0.055475637316703796,
-0.06355323642492294,
-0.026483377441763878,
-0.17467962205410004,
-0.08663471043109894,
-0.08433863520622253,
-0.12621259689331055,
0.022768091410398483,
-0.04107997566461563,
0.057024307548999786,
0.0625314712524414,
0.032543085515499115,
-0.0157329011708498,
0.07281465828418732,
-0.02041274681687355,
-0.06990478932857513,
0.10915564000606537,
-0.04946824535727501,
-0.01630544103682041,
-0.06409011036157608,
-0.0044129034504294395,
0.14455416798591614,
-0.007619301788508892,
0.03512691706418991,
-0.004346619360148907,
-0.1098514050245285,
0.021101588383316994,
-0.12399046123027802,
-0.07452832907438278,
-0.002061061095446348,
0.017332041636109352,
0.07675322890281677,
0.1477564573287964,
0.09452130645513535,
-0.06194482743740082,
0.031416360288858414,
0.151639923453331,
-0.08056037873029709,
-0.12724220752716064,
-0.049849532544612885,
0.13239997625350952,
-0.02727849781513214,
0.06392749398946762,
-0.04383614659309387,
-0.080082967877388,
-0.01214776374399662,
0.20645126700401306,
0.29291778802871704,
-0.07754432410001755,
0.08578263968229294,
-0.06423530727624893,
0.025062965229153633,
-0.06641329824924469,
-0.03340701013803482,
0.10634398460388184,
0.21408608555793762,
0.016784092411398888,
-0.04486118629574776,
-0.06570755690336227,
-0.03315667435526848,
-0.016102150082588196,
0.03699179366230965,
-0.03504108637571335,
-0.11511098593473434,
-0.008939127437770367,
0.09548551589250565,
-0.10801435261964798,
-0.10398206114768982,
-0.15013070404529572,
-0.1533442884683609,
-0.04975632578134537,
-0.01600790210068226,
0.0836927741765976,
0.13927170634269714,
-0.008538463152945042,
-0.0718303993344307,
-0.046482931822538376,
0.02570166438817978,
0.0038893227465450764,
-0.15991733968257904,
0.03521644324064255,
0.02630838379263878,
-0.14015144109725952,
-0.01491103507578373,
-0.020875107496976852,
0.12443836033344269,
0.012701599858701229,
0.13245344161987305,
0.015811365097761154,
0.1828443706035614,
0.007511064875870943,
-0.1598130464553833,
0.00226204888895154,
0.18616631627082825,
0.0057518454268574715,
0.08878977596759796,
0.0512651652097702,
-0.12594753503799438,
0.06295833736658096,
-0.09189431369304657,
-0.11074250936508179,
-0.06875722855329514,
0.0035343586932867765,
-0.03594409301877022,
0.06621193885803223,
-0.009723593480885029,
-0.0338192880153656,
-0.03577956184744835,
-0.027698693796992302,
-0.017032330855727196,
0.046107880771160126,
-0.08708781749010086,
-0.07724358141422272,
-0.14049401879310608,
-0.014842791482806206,
-0.09546864777803421,
-0.007914790883660316,
-0.1420651525259018,
-0.02658231183886528,
-0.07371006906032562,
0.002858353080227971,
-0.024043945595622063,
0.02887023612856865,
0.10477802157402039,
0.00587398000061512,
0.013393592089414597,
0.00965821836143732,
0.09651101380586624,
0.1291121244430542,
-0.13733647763729095,
-0.05555998906493187
] |
null | null |
transformers
|
# xlm-roberta-base-finetune-qa
Finetuning `xlm-roberta-base` on the training sets of `iapp_wiki_qa_squad`, `thaiqa_squad`, and `nsc_qa`. Training examples whose cosine similarity with validation or test examples exceeds 0.8 are removed, and the contexts of the latter two datasets are trimmed to roughly 300 `newmm` words. Benchmarks are shared on [wandb](https://wandb.ai/cstorm125/wangchanberta-qa) using the validation and test sets of `iapp_wiki_qa_squad`.
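A sketch of the leakage filter described above is shown below: drop any training example whose text is too similar (cosine similarity above 0.8) to a validation or test example. The card does not specify which text representation was used for the similarity, so plain TF-IDF vectors are assumed here purely for illustration:
```
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics.pairwise import cosine_similarity

def filter_leaky_examples(train_texts, heldout_texts, threshold=0.8):
    # Fit a shared vocabulary, then compare every training example
    # against every validation/test example.
    vectorizer = TfidfVectorizer().fit(train_texts + heldout_texts)
    sims = cosine_similarity(vectorizer.transform(train_texts),
                             vectorizer.transform(heldout_texts))
    keep = sims.max(axis=1) < threshold  # keep only sufficiently dissimilar examples
    return [text for text, k in zip(train_texts, keep) if k]
```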
Trained with [thai2transformers](https://github.com/vistec-AI/thai2transformers/blob/dev/scripts/downstream/train_question_answering_lm_finetuning.py).
Train with:
```
export WANDB_PROJECT=wangchanberta-qa
export MODEL_NAME=xlm-roberta-base
python train_question_answering_lm_finetuning.py \
--model_name $MODEL_NAME \
--dataset_name chimera_qa \
--output_dir $MODEL_NAME-finetune-chimera_qa-model \
--log_dir $MODEL_NAME-finetune-chimera_qa-log \
--pad_on_right \
--fp16
```
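For inference, the finetuned checkpoint can be loaded with the standard question-answering pipeline. A minimal usage sketch, assuming the model id `airesearch/xlm-roberta-base-finetune-qa` on the Hugging Face Hub (the id of this repository):
```
from transformers import pipeline

qa = pipeline("question-answering", model="airesearch/xlm-roberta-base-finetune-qa")

# Any Thai question/context pair works here; this one mirrors the widget example above.
prediction = qa(
    question="สวนกุหลาบเป็นโรงเรียนอะไร",
    context="โรงเรียนสวนกุหลาบวิทยาลัย (Suankularb Wittayalai School) เป็นโรงเรียนชายล้วน ...",
)
print(prediction["answer"], prediction["score"])
```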
|
{"widget": [{"text": "\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2d\u0e30\u0e44\u0e23", "context": "\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e2a\u0e27\u0e19\u0e01\u0e38\u0e2b\u0e25\u0e32\u0e1a\u0e27\u0e34\u0e17\u0e22\u0e32\u0e25\u0e31\u0e22 (Suankularb Wittayalai School) (\u0e2d\u0e31\u0e01\u0e29\u0e23\u0e22\u0e48\u0e2d : \u0e2a.\u0e01. / S.K.) \u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e0a\u0e32\u0e22\u0e25\u0e49\u0e27\u0e19 \u0e23\u0e30\u0e14\u0e31\u0e1a\u0e0a\u0e31\u0e49\u0e19\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e19\u0e32\u0e14\u0e43\u0e2b\u0e0d\u0e48\u0e1e\u0e34\u0e40\u0e28\u0e29 \u0e2a\u0e31\u0e07\u0e01\u0e31\u0e14\u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e40\u0e02\u0e15\u0e1e\u0e37\u0e49\u0e19\u0e17\u0e35\u0e48\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e21\u0e31\u0e18\u0e22\u0e21\u0e28\u0e36\u0e01\u0e29\u0e32\u0e40\u0e02\u0e15 1 \u0e2a\u0e33\u0e19\u0e31\u0e01\u0e07\u0e32\u0e19\u0e04\u0e13\u0e30\u0e01\u0e23\u0e23\u0e21\u0e01\u0e32\u0e23\u0e01\u0e32\u0e23\u0e28\u0e36\u0e01\u0e29\u0e32\u0e02\u0e31\u0e49\u0e19\u0e1e\u0e37\u0e49\u0e19\u0e10\u0e32\u0e19 (\u0e0a\u0e37\u0e48\u0e2d\u0e40\u0e14\u0e34\u0e21: \u0e01\u0e23\u0e21\u0e2a\u0e32\u0e21\u0e31\u0e0d\u0e28\u0e36\u0e01\u0e29\u0e32) \u0e01\u0e23\u0e30\u0e17\u0e23\u0e27\u0e07\u0e28\u0e36\u0e01\u0e29\u0e32\u0e18\u0e34\u0e01\u0e32\u0e23 \u0e01\u0e48\u0e2d\u0e15\u0e31\u0e49\u0e07\u0e42\u0e14\u0e22 \u0e1e\u0e23\u0e30\u0e1a\u0e32\u0e17\u0e2a\u0e21\u0e40\u0e14\u0e47\u0e08\u0e1e\u0e23\u0e30\u0e08\u0e38\u0e25\u0e08\u0e2d\u0e21\u0e40\u0e01\u0e25\u0e49\u0e32\u0e40\u0e08\u0e49\u0e32\u0e2d\u0e22\u0e39\u0e48\u0e2b\u0e31\u0e27 \u0e44\u0e14\u0e49\u0e23\u0e31\u0e1a\u0e01\u0e32\u0e23\u0e2a\u0e16\u0e32\u0e1b\u0e19\u0e32\u0e02\u0e36\u0e49\u0e19\u0e43\u0e19\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 8 \u0e21\u0e35\u0e19\u0e32\u0e04\u0e21 \u0e1e.\u0e28. 2424 (\u0e02\u0e13\u0e30\u0e19\u0e31\u0e49\u0e19\u0e19\u0e31\u0e1a\u0e27\u0e31\u0e19\u0e17\u0e35\u0e48 1 \u0e40\u0e21\u0e29\u0e32\u0e22\u0e19 \u0e40\u0e1b\u0e47\u0e19\u0e27\u0e31\u0e19\u0e02\u0e36\u0e49\u0e19\u0e1b\u0e35\u0e43\u0e2b\u0e21\u0e48 \u0e40\u0e21\u0e37\u0e48\u0e2d\u0e19\u0e31\u0e1a\u0e2d\u0e22\u0e48\u0e32\u0e07\u0e2a\u0e32\u0e01\u0e25\u0e16\u0e37\u0e2d\u0e40\u0e1b\u0e47\u0e19 \u0e1e.\u0e28. 2425) \u0e42\u0e14\u0e22\u0e40\u0e1b\u0e47\u0e19\u0e42\u0e23\u0e07\u0e40\u0e23\u0e35\u0e22\u0e19\u0e23\u0e31\u0e10\u0e1a\u0e32\u0e25\u0e41\u0e2b\u0e48\u0e07\u0e41\u0e23\u0e01\u0e02\u0e2d\u0e07\u0e1b\u0e23\u0e30\u0e40\u0e17\u0e28\u0e44\u0e17\u0e22"}]}
|
question-answering
|
airesearch/xlm-roberta-base-finetune-qa
|
[
"transformers",
"pytorch",
"xlm-roberta",
"question-answering",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #xlm-roberta #question-answering #endpoints_compatible #region-us
|
# xlm-roberta-base-finetune-qa
Finetuning 'xlm-roberta-base' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.
Trained with thai2transformers.
Train with:
|
[
"# xlm-roberta-base-finetune-qa\n\nFinetuning 'xlm-roberta-base' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nTrain with:"
] |
[
"TAGS\n#transformers #pytorch #xlm-roberta #question-answering #endpoints_compatible #region-us \n",
"# xlm-roberta-base-finetune-qa\n\nFinetuning 'xlm-roberta-base' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nTrain with:"
] |
[
33,
137
] |
[
"passage: TAGS\n#transformers #pytorch #xlm-roberta #question-answering #endpoints_compatible #region-us \n# xlm-roberta-base-finetune-qa\n\nFinetuning 'xlm-roberta-base' with the training set of 'iapp_wiki_qa_squad', 'thaiqa_squad', and 'nsc_qa' (removed examples which have cosine similarity with validation and test examples over 0.8; contexts of the latter two are trimmed to be around 300 'newmm' words). Benchmarks shared on wandb using validation and test sets of 'iapp_wiki_qa_squad'.\nTrained with thai2transformers.\n\nTrain with:"
] |
[
-0.06340575218200684,
-0.09364575147628784,
-0.0028253146447241306,
0.08245882391929626,
0.054491981863975525,
-0.01630401611328125,
0.07953746616840363,
0.1427355259656906,
-0.03259872645139694,
0.002955858828499913,
0.08285122364759445,
0.054296866059303284,
0.05122102424502373,
0.031610455363988876,
-0.07162099331617355,
-0.17529621720314026,
0.06074916198849678,
0.11971043050289154,
-0.1251733899116516,
0.1333480179309845,
0.10996031761169434,
-0.06834851950407028,
0.08427143841981888,
0.042582567781209946,
-0.17622871696949005,
0.042276669293642044,
0.019286062568426132,
-0.0896592065691948,
0.0926113873720169,
0.03075028769671917,
0.18492211401462555,
0.06519167125225067,
-0.05736352130770683,
-0.20820769667625427,
0.04812633991241455,
-0.028158443048596382,
0.030135387554764748,
-0.013509858399629593,
-0.0011663382174447179,
-0.018564891070127487,
-0.06344768404960632,
0.02515186369419098,
0.029953187331557274,
0.004678402561694384,
-0.09317474812269211,
-0.05071399360895157,
-0.06027577444911003,
0.039437100291252136,
0.13133208453655243,
0.06652460247278214,
-0.01807974837720394,
0.22056154906749725,
-0.19219589233398438,
0.09263867139816284,
0.16995875537395477,
-0.3857592046260834,
-0.042257267981767654,
0.10266087204217911,
0.09944487363100052,
0.053184568881988525,
-0.06573335826396942,
0.002249418292194605,
0.08184902369976044,
0.034261759370565414,
-0.03263351321220398,
-0.11548224091529846,
-0.08041655272245407,
0.029303468763828278,
-0.08549030870199203,
0.029023291543126106,
0.1708565205335617,
0.02730870433151722,
-0.08716186881065369,
-0.05980442836880684,
-0.012153541669249535,
-0.06698217242956161,
0.006522171199321747,
-0.07684745639562607,
-0.031139077618718147,
-0.0056688799522817135,
-0.05519966036081314,
0.025998737663030624,
-0.08496765047311783,
-0.08686301112174988,
-0.10937011986970901,
0.09101521968841553,
0.07137179374694824,
0.026028167456388474,
-0.14632803201675415,
0.025447078049182892,
-0.007599606644362211,
-0.09846022725105286,
-0.10602837055921555,
-0.044838797301054,
-0.05312255769968033,
-0.002917250618338585,
-0.07296647876501083,
-0.05723455548286438,
0.12196996808052063,
0.15084969997406006,
-0.05187735706567764,
0.0657939687371254,
-0.10303382575511932,
0.05553293228149414,
-0.05336032435297966,
0.2217838317155838,
-0.02333172783255577,
0.07754197716712952,
0.025161748751997948,
-0.021023450419306755,
-0.0853816568851471,
0.02635393664240837,
-0.01681823655962944,
-0.10057830810546875,
0.07919792085886002,
0.06623982638120651,
-0.10123764723539352,
0.08392712473869324,
-0.022641640156507492,
-0.017966758459806442,
0.04128759726881981,
-0.1126156821846962,
-0.051738496869802475,
-0.015315056778490543,
-0.03367394581437111,
0.025770708918571472,
0.014034629799425602,
0.0722944512963295,
-0.041538532823324203,
0.025885285809636116,
0.007277263794094324,
0.02671600691974163,
-0.003516286611557007,
-0.06824637204408646,
-0.010780910030007362,
-0.11310195922851562,
0.021225357428193092,
-0.13730087876319885,
-0.09758112579584122,
-0.0006240532384254038,
0.027426380664110184,
0.027565397322177887,
0.02705841138958931,
-0.04423614591360092,
-0.011938638053834438,
-0.06237025186419487,
0.0020390877034515142,
0.019115306437015533,
-0.027863286435604095,
0.07620827853679657,
0.05739845708012581,
0.13154929876327515,
-0.11105009913444519,
0.039527930319309235,
-0.13360410928726196,
0.07054635882377625,
-0.12028127163648605,
0.06794138252735138,
-0.05715987831354141,
-0.010973811149597168,
-0.039244480431079865,
-0.03210002928972244,
-0.031640756875276566,
0.02896229363977909,
0.013141544535756111,
0.049052897840738297,
-0.09976745396852493,
-0.04841886833310127,
0.2574613392353058,
-0.09347859770059586,
-0.18435059487819672,
0.16359193623065948,
-0.062185220420360565,
0.05261832848191261,
0.0330713726580143,
0.2242382913827896,
0.0851970985531807,
-0.1111234650015831,
0.04876640439033508,
-0.013411527499556541,
-0.04594094678759575,
-0.002667381428182125,
0.050762828439474106,
0.06104329973459244,
-0.01784818060696125,
0.0863613560795784,
-0.05127609148621559,
0.04701010882854462,
-0.05501368269324303,
-0.06795504689216614,
-0.08772163838148117,
-0.1041511744260788,
0.021324925124645233,
-0.04102690517902374,
0.1299922615289688,
-0.07364251464605331,
-0.009115921333432198,
-0.01882915198802948,
0.07549423724412918,
-0.027687659487128258,
0.010823399759829044,
-0.13102135062217712,
0.12099752575159073,
-0.1477583646774292,
-0.0474177822470665,
-0.1259354054927826,
0.009436240419745445,
0.04952158406376839,
0.09528622776269913,
0.049965474754571915,
0.11396343261003494,
0.010012483224272728,
-0.03926423192024231,
-0.04762469232082367,
-0.0031301844865083694,
0.05433424562215805,
-0.016553429886698723,
-0.057543765753507614,
-0.01986631006002426,
0.06835807114839554,
-0.04318709671497345,
0.036244772374629974,
-0.09236052632331848,
0.03129112720489502,
0.002136592287570238,
0.1010219007730484,
0.022559260949492455,
0.07082714140415192,
0.05633450672030449,
0.09435179829597473,
0.0010643296409398317,
0.0020534200593829155,
0.04036902263760567,
0.017613014206290245,
-0.13048185408115387,
0.11516819894313812,
-0.03810831531882286,
0.19019275903701782,
0.15403354167938232,
-0.09020181000232697,
-0.026487302035093307,
0.005730149336159229,
-0.05058548226952553,
-0.035625044256448746,
-0.08088035881519318,
0.12147349864244461,
0.1828327775001526,
-0.02914355881512165,
0.1817293018102646,
-0.12013998627662659,
-0.021428892388939857,
0.0062444075010716915,
-0.06430461257696152,
0.020742304623126984,
0.06933769583702087,
0.04577058553695679,
-0.15694944560527802,
0.09116966277360916,
0.11753949522972107,
-0.06907229125499725,
0.16551721096038818,
-0.07563943415880203,
-0.03204607963562012,
-0.016137367114424706,
0.12503036856651306,
-0.021510696038603783,
0.0620960108935833,
-0.0298004150390625,
0.009470283053815365,
0.03720345348119736,
0.05401697754859924,
0.05533068627119064,
-0.14877204596996307,
-0.055591534823179245,
0.018063955008983612,
-0.08065612614154816,
-0.09467536211013794,
0.1154429093003273,
0.03970283269882202,
0.07629140466451645,
0.006330814212560654,
0.00119087565690279,
0.03054087795317173,
0.008788751438260078,
-0.12077293545007706,
0.16439394652843475,
-0.04074559360742569,
-0.2648687958717346,
-0.07149749249219894,
0.0300515815615654,
-0.09448041766881943,
-0.05144278332591057,
0.04440454766154289,
-0.14982332289218903,
0.026485105976462364,
0.009746836498379707,
-0.04916416481137276,
-0.049439456313848495,
0.04150829091668129,
-0.04881938919425011,
0.052455008029937744,
0.032829999923706055,
-0.08121980726718903,
-0.0528014600276947,
-0.05179064720869064,
-0.07907841354608536,
0.10538296401500702,
-0.1453150510787964,
0.05353272706270218,
0.007400264497846365,
-0.11011026054620743,
0.03973335027694702,
-0.021155809983611107,
0.2247237265110016,
-0.05947474390268326,
0.005073266103863716,
0.07453125715255737,
-0.11052961647510529,
0.018930837512016296,
0.10979361087083817,
-0.04035269469022751,
-0.09574075788259506,
0.027850951999425888,
0.03832226246595383,
-0.04935746267437935,
-0.2517513334751129,
0.0199750903993845,
-0.04156184196472168,
0.010066915303468704,
-0.034548744559288025,
0.05095696821808815,
0.07213068753480911,
0.11695189774036407,
0.04717979207634926,
-0.017759623005986214,
-0.10948091000318527,
0.04719967022538185,
0.006634187418967485,
0.06631476432085037,
0.13725455105304718,
-0.07712441682815552,
-0.037787776440382004,
0.0015500413719564676,
0.17711852490901947,
0.16638065874576569,
0.041942138224840164,
-0.038704656064510345,
0.11576613038778305,
0.253561407327652,
0.20335792005062103,
0.04306701943278313,
-0.09926770627498627,
-0.06974118947982788,
0.015286708250641823,
-0.0076440079137682915,
-0.048486050218343735,
0.027458947151899338,
0.01043919287621975,
0.020926570519804955,
-0.06081371754407883,
0.023824132978916168,
0.07043541222810745,
0.259429395198822,
0.0006588142714463174,
-0.11765577644109726,
-0.05000239238142967,
0.009155998937785625,
-0.08799486607313156,
-0.007269634399563074,
0.05959917604923248,
0.025882365182042122,
-0.16217483580112457,
0.018872231245040894,
-0.02544625848531723,
0.13193020224571228,
-0.026693398132920265,
0.020646357908844948,
-0.0577823631465435,
-0.13762196898460388,
-0.008270280435681343,
0.036172326654195786,
-0.3725380599498749,
0.21734753251075745,
0.02418799325823784,
0.030393268913030624,
-0.0339241586625576,
-0.028286835178732872,
-0.016195746138691902,
0.042537011206150055,
0.09787696599960327,
-0.023817550390958786,
-0.12603434920310974,
-0.13977372646331787,
0.005905697587877512,
0.13350804150104523,
0.07941114902496338,
0.09087679535150528,
0.08887004107236862,
-0.0023575066588819027,
0.038364823907613754,
-0.008028710260987282,
0.0014325627125799656,
-0.13271573185920715,
-0.04525788128376007,
-0.013503530994057655,
0.014396557584404945,
-0.027389561757445335,
-0.034763187170028687,
0.0044537256471812725,
-0.16142746806144714,
0.21572479605674744,
-0.09612295776605606,
-0.07839689403772354,
-0.08144595474004745,
0.05502867326140404,
0.048428069800138474,
-0.08135251700878143,
0.006890629883855581,
-0.04559522494673729,
0.022417843341827393,
-0.033315423876047134,
-0.04436643794178963,
0.04969478398561478,
-0.04497365280985832,
-0.047897592186927795,
0.00838698074221611,
0.07524016499519348,
-0.02599909156560898,
0.04264151304960251,
0.07973436266183853,
-0.01610177382826805,
0.016813000664114952,
-0.07462693005800247,
-0.057235345244407654,
-0.032969798892736435,
-0.0273131150752306,
0.015675734728574753,
-0.11361867189407349,
0.07139847427606583,
-0.1230611726641655,
-0.0879460945725441,
0.1923898309469223,
0.11052060127258301,
-0.07586336880922318,
0.09692566096782684,
0.05792859569191933,
-0.041531626135110855,
-0.11427432298660278,
0.022154035046696663,
0.006309804040938616,
0.07300891727209091,
-0.06779350340366364,
-0.02336161769926548,
0.1288529932498932,
0.05459308624267578,
0.00777920987457037,
-0.03636624291539192,
-0.1076316386461258,
-0.10348352789878845,
0.0809071883559227,
0.05704248324036598,
0.2517162561416626,
-0.12348723411560059,
-0.018920570611953735,
0.02326826937496662,
-0.1861332803964615,
0.011508016847074032,
-0.06806548684835434,
0.09843182563781738,
-0.07135520875453949,
0.04677698016166687,
-0.0014656935818493366,
-0.02299771085381508,
0.12901021540164948,
0.015474420972168446,
0.05791632458567619,
-0.04292702302336693,
-0.02419804222881794,
-0.013413280248641968,
0.014833462424576283,
0.08256618678569794,
-0.08964399248361588,
0.06305784732103348,
-0.17729315161705017,
-0.039262671023607254,
-0.057194944471120834,
-0.03201071545481682,
-0.013364083133637905,
-0.049062613397836685,
-0.06497887521982193,
0.02652767114341259,
0.02033373899757862,
0.005461286753416061,
-0.033147916197776794,
-0.10033217072486877,
0.10947366803884506,
0.05115015059709549,
0.10519377142190933,
-0.057463061064481735,
0.028165077790617943,
-0.014874747022986412,
-0.030007898807525635,
0.12228209525346756,
-0.17054107785224915,
0.06338474899530411,
0.1261473447084427,
-0.02523457072675228,
0.11578075587749481,
0.06200229749083519,
0.014046947471797466,
0.09525161236524582,
0.04967324063181877,
-0.05851387232542038,
-0.084197036921978,
0.04132719710469246,
-0.19515354931354523,
0.025885675102472305,
-0.010276228189468384,
0.09388820827007294,
-0.04840168356895447,
-0.04392175003886223,
-0.005608187057077885,
-0.04746284708380699,
-0.11632247269153595,
0.09083562344312668,
0.07858853787183762,
0.05009505897760391,
-0.07418107241392136,
0.060135748237371445,
0.0032842739019542933,
-0.11053837090730667,
0.03662481531500816,
-0.0009801973355934024,
-0.12936151027679443,
-0.053330328315496445,
0.037705954164266586,
0.19611045718193054,
-0.07429778575897217,
-0.08030585199594498,
-0.1589268445968628,
-0.1109667718410492,
0.04004695266485214,
0.22138915956020355,
0.0922202542424202,
0.019277559593319893,
0.04229586571455002,
-0.030739376321434975,
-0.06752132624387741,
0.030077217146754265,
0.07544829696416855,
0.08125579357147217,
-0.12236250936985016,
-0.005558541044592857,
0.02147027477622032,
0.16793999075889587,
-0.028958244249224663,
-0.024451330304145813,
-0.13849659264087677,
0.07717051357030869,
-0.23672811686992645,
0.06832703948020935,
-0.01801740750670433,
-0.022118842229247093,
-0.0003391321806702763,
-0.1284988820552826,
-0.06236572936177254,
0.027847202494740486,
-0.07024481892585754,
0.038035742938518524,
-0.013678725808858871,
-0.005300018470734358,
-0.04215257987380028,
-0.027046214789152145,
0.10718037933111191,
-0.04472610726952553,
0.046950843185186386,
0.0575043186545372,
-0.09042029827833176,
0.07899783551692963,
-0.14481830596923828,
-0.04793247580528259,
0.007425629999488592,
0.01815902441740036,
0.058983124792575836,
-0.043749235570430756,
0.07229074090719223,
0.061164893209934235,
0.0798104777932167,
0.053218308836221695,
0.15050917863845825,
-0.10424492508172989,
-0.07868626713752747,
-0.06595438718795776,
-0.03133725747466087,
-0.09140828996896744,
-0.02467472292482853,
0.1812487542629242,
0.10177907347679138,
0.09050291776657104,
-0.09121330082416534,
0.06618262082338333,
-0.11112257093191147,
-0.00650716433301568,
-0.00704926997423172,
-0.10702244192361832,
0.03316376730799675,
-0.0733010470867157,
0.0016270751366391778,
-0.049689315259456635,
0.20024214684963226,
-0.06918427348136902,
0.02487618289887905,
0.020442798733711243,
-0.10703852027654648,
0.016716472804546356,
0.05566982924938202,
0.2827918529510498,
0.069678895175457,
-0.009533756412565708,
-0.054159943014383316,
0.011211474426090717,
-0.02208741009235382,
0.06890591233968735,
0.011003551073372364,
0.2857811748981476,
-0.05453796312212944,
0.07644118368625641,
0.04868900403380394,
0.10571887344121933,
0.028158314526081085,
-0.03762461245059967,
-0.09696754068136215,
-0.004839022643864155,
0.03601709008216858,
-0.029440419748425484,
0.2869356870651245,
-0.12229777872562408,
0.0569118857383728,
-0.029061300680041313,
-0.0732305645942688,
-0.13314422965049744,
0.004397836979478598,
-0.13512177765369415,
-0.09130588918924332,
0.06597485393285751,
-0.08295618742704391,
-0.03584472835063934,
0.08344549685716629,
0.09031234681606293,
0.0075297243893146515,
0.15993450582027435,
0.060817599296569824,
-0.02255951054394245,
0.043616991490125656,
-0.057345759123563766,
-0.023303015157580376,
0.04698256775736809,
0.03867487236857414,
0.010057317093014717,
-0.049084268510341644,
-0.00766628235578537,
0.0066475635394454,
-0.025676874443888664,
-0.026686396449804306,
-0.13458667695522308,
-0.032538119703531265,
-0.007861685939133167,
0.006394667085260153,
0.027878081426024437,
0.09853584319353104,
0.049922503530979156,
-0.003902885364368558,
-0.0056914822198450565,
0.12075137346982956,
-0.002933420706540346,
-0.18118084967136383,
-0.24780656397342682,
0.05189406871795654,
0.006818363908678293,
0.0197738129645586,
0.0024879679549485445,
0.009345464408397675,
0.009320749901235104,
0.3527725636959076,
0.18934407830238342,
-0.024061068892478943,
0.058595601469278336,
0.03999657556414604,
0.027128104120492935,
-0.026856044307351112,
0.054219696670770645,
0.10224337130784988,
0.08539250493049622,
-0.09963060915470123,
-0.11352062970399857,
-0.10196402668952942,
-0.0927543193101883,
-0.05214140936732292,
0.08076276630163193,
0.061379335820674896,
-0.0029316223226487637,
-0.06059460714459419,
0.036870501935482025,
-0.05511065572500229,
0.07140855491161346,
0.010656207799911499,
-0.1288851648569107,
-0.12213525921106339,
-0.03801979869604111,
-0.09606941044330597,
-0.005720073357224464,
-0.003721941728144884,
-0.04163595288991928,
-0.003114090533927083,
0.008375309407711029,
0.0017773666186258197,
-0.04515257850289345,
-0.008238879963755608,
0.10821753740310669,
0.07425132393836975,
-0.05527432635426521,
0.046024564653635025,
0.12359807640314102,
0.09412767738103867,
0.0879034698009491,
0.022374385967850685,
0.1112997755408287,
0.038834474980831146,
0.0654674842953682,
-0.0399390384554863,
0.11482443660497665,
0.0334247350692749,
0.03932565823197365,
0.060148414224386215,
-0.08641479164361954,
0.08644386380910873,
-0.1295512616634369,
-0.031555354595184326,
-0.13113583624362946,
0.0884794220328331,
-0.03764394298195839,
0.11794627457857132,
0.18162986636161804,
-0.021674707531929016,
0.0683511346578598,
-0.049049537628889084,
0.07558406889438629,
0.013907247222959995,
-0.07144059240818024,
-0.08131737262010574,
-0.16111521422863007,
0.02542763762176037,
0.07900691777467728,
-0.0399649515748024,
-0.2626499831676483,
-0.06451244652271271,
-0.011422750540077686,
-0.03892064839601517,
-0.015699587762355804,
0.09211145341396332,
0.12512356042861938,
0.07029765099287033,
-0.01792079396545887,
-0.22033417224884033,
0.017446795478463173,
0.054513778537511826,
-0.07420125603675842,
-0.10091985762119293
] |
null | null |
transformers
|
# Finetuned `xlm-roberta-base` model on Thai sequence and token classification datasets
<br>
Finetuned XLM Roberta BASE model on Thai sequence and token classification datasets
The script and documentation can be found at [this repository](https://github.com/vistec-AI/thai2transformers).
<br>
## Model description
<br>
We use the pretrained cross-lingual RoBERTa model as proposed by [[Conneau et al., 2020]](https://arxiv.org/abs/1911.02116). We download the pretrained PyTorch model via HuggingFace's Model Hub (https://huggingface.co/xlm-roberta-base).
<br>
## Intended uses & limitations
<br>
You can use the finetuned models for multiclass/multilabel text classification and token classification tasks.
<br>
**Multiclass text classification**
- `wisesight_sentiment`
4-class text classification task (`positive`, `neutral`, `negative`, and `question`) based on social media posts and tweets.
- `wongnai_reviews`
  Users' review rating classification task (scale ranging from 1 to 5)
- `generated_reviews_enth` : (`review_star` as label)
  Generated users' review rating classification task (scale ranging from 1 to 5).
**Multilabel text classification**
- `prachathai67k`
  Thai topic classification with 12 labels based on a news article corpus from prachathai.com. Details are described on this [page](https://huggingface.co/datasets/prachathai67k).
**Token classification**
- `thainer`
  Named-entity recognition tagging with 13 named-entities as described in this [page](https://huggingface.co/datasets/thainer).
- `lst20` : NER and POS tagging
  Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this [page](https://huggingface.co/datasets/lst20).
<br>
## How to use
<br>
The example notebook demonstrating how to use the finetuned models for inference can be found in this [Colab notebook](https://colab.research.google.com/drive/1Kbk6sBspZLwcnOE61adAQo30xxqOQ9ko). A minimal loading sketch is also given below.
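As a hedged sketch of that interface (the Hub ids of the individual finetuned checkpoints are not listed in this card, so the public `xlm-roberta-base` id is used below just to show the loading pattern for a 4-class `wisesight_sentiment`-style classifier):
```
from transformers import AutoTokenizer, AutoModelForSequenceClassification

tokenizer = AutoTokenizer.from_pretrained("xlm-roberta-base")
model = AutoModelForSequenceClassification.from_pretrained(
    "xlm-roberta-base",
    num_labels=4,  # positive / neutral / negative / question
)

inputs = tokenizer("อาหารอร่อยมาก", return_tensors="pt")
logits = model(**inputs).logits
print(logits.shape)  # torch.Size([1, 4])
```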
<br>
**BibTeX entry and citation info**
```
@misc{lowphansirikul2021wangchanberta,
title={WangchanBERTa: Pretraining transformer-based Thai Language Models},
author={Lalita Lowphansirikul and Charin Polpanumas and Nawat Jantrakulchai and Sarana Nutanong},
year={2021},
eprint={2101.09635},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{}
|
fill-mask
|
airesearch/xlm-roberta-base-finetuned
|
[
"transformers",
"xlm-roberta",
"fill-mask",
"arxiv:1911.02116",
"arxiv:2101.09635",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1911.02116",
"2101.09635"
] |
[] |
TAGS
#transformers #xlm-roberta #fill-mask #arxiv-1911.02116 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us
|
# Finetuned 'xlm-roberta-base' model on Thai sequence and token classification datasets
<br>
Finetuned XLM Roberta BASE model on Thai sequence and token classification datasets
The script and documentation can be found at this repository.
<br>
## Model description
<br>
We use the pretrained cross-lingual RoBERTa model as proposed by [[Conneau et al., 2020]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL
<br>
## Intended uses & limitations
<br>
You can use the finetuned models for multiclass/multilabel text classification and token classification task.
<br>
Multiclass text classification
- 'wisesight_sentiment'
4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.
- 'wongnai_reivews'
Users' review rating classification task (scale is ranging from 1 to 5)
- 'generated_reviews_enth' : ('review_star' as label)
Generated users' review rating classification task (scale is ranging from 1 to 5).
Multilabel text classification
- 'prachathai67k'
Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.
Token classification
- 'thainer'
Named-entity recognition tagging with 13 named-entities as described in this page.
- 'lst20' : NER and POS tagging
Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as described in this page.
<br>
## How to use
<br>
The example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook
<br>
BibTeX entry and citation info
|
[
"# Finetuend 'xlm-roberta-base' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>",
"## Model description\n\n<br>\n\nWe use the pretrained cross-lingual RoBERTa model as proposed by [[Conneau et al., 2020]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook\n\n<br>\n\n\nBibTeX entry and citation info"
] |
[
"TAGS\n#transformers #xlm-roberta #fill-mask #arxiv-1911.02116 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n",
"# Finetuend 'xlm-roberta-base' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>",
"## Model description\n\n<br>\n\nWe use the pretrained cross-lingual RoBERTa model as proposed by [[Conneau et al., 2020]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>",
"## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>",
"## How to use\n\n<br>\n\nThe example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook\n\n<br>\n\n\nBibTeX entry and citation info"
] |
[
54,
69,
60,
280,
42
] |
[
"passage: TAGS\n#transformers #xlm-roberta #fill-mask #arxiv-1911.02116 #arxiv-2101.09635 #autotrain_compatible #endpoints_compatible #region-us \n# Finetuend 'xlm-roberta-base' model on Thai sequence and token classification datasets\n\n<br>\n\nFinetuned XLM Roberta BASE model on Thai sequence and token classification datasets\nThe script and documentation can be found at this repository.\n\n<br>## Model description\n\n<br>\n\nWe use the pretrained cross-lingual RoBERTa model as proposed by [[Conneau et al., 2020]](URL We download the pretrained PyTorch model via HuggingFace's Model Hub (URL\n<br>## Intended uses & limitations\n\n<br>\n\nYou can use the finetuned models for multiclass/multilabel text classification and token classification task.\n\n<br>\n\nMulticlass text classification\n\n\n- 'wisesight_sentiment' \n \n 4-class text classification task ('positive', 'neutral', 'negative', and 'question') based on social media posts and tweets.\n\n- 'wongnai_reivews' \n\n Users' review rating classification task (scale is ranging from 1 to 5)\n\n- 'generated_reviews_enth' : ('review_star' as label)\n\n Generated users' review rating classification task (scale is ranging from 1 to 5).\n\nMultilabel text classification\n\n- 'prachathai67k'\n\n Thai topic classification with 12 labels based on news article corpus from URL. The detail is described in this page.\n \n\nToken classification\n\n- 'thainer' \n\n Named-entity recognition tagging with 13 named-entities as descibed in this page.\n\n- 'lst20' : NER NER and POS tagging\n \n Named-entity recognition tagging with 10 named-entities and Part-of-Speech tagging with 16 tags as descibed in this page.\n\n<br>## How to use\n\n<br>\n\nThe example notebook demonstrating how to use finetuned model for inference can be found at this Colab notebook\n\n<br>\n\n\nBibTeX entry and citation info"
] |
[
0.019229451194405556,
0.04474462568759918,
-0.006500227376818657,
0.03396664559841156,
0.11710596084594727,
0.0306523647159338,
0.13348302245140076,
0.0680093839764595,
0.01242862455546856,
0.10120369493961334,
-0.0774005651473999,
0.02800348401069641,
0.08855036646127701,
0.10306128114461899,
0.030863717198371887,
-0.255337119102478,
0.04313816875219345,
-0.07294829189777374,
0.05541711673140526,
0.10123944282531738,
0.10134541988372803,
-0.09315218776464462,
0.09413497895002365,
0.042269349098205566,
-0.05531882867217064,
0.02437555231153965,
-0.01921853795647621,
-0.0748666524887085,
0.04551757127046585,
0.04106621444225311,
0.13779446482658386,
0.0242173969745636,
0.053630754351615906,
-0.15429863333702087,
0.013600809499621391,
0.05709104239940643,
0.016281718388199806,
0.04337843507528305,
0.1337868571281433,
-0.09552018344402313,
0.1003432497382164,
-0.07678079605102539,
0.12486858665943146,
0.06848348677158356,
-0.10720743983983994,
-0.06089501082897186,
-0.13025951385498047,
0.17261938750743866,
0.11667460203170776,
0.03525931388139725,
-0.04807708412408829,
0.0795394778251648,
-0.10144145786762238,
0.029970208182930946,
-0.0033727458212524652,
-0.2062510848045349,
-0.03394670411944389,
0.07319813966751099,
-0.015685779973864555,
0.07365071773529053,
-0.09284673631191254,
-0.004861720371991396,
-0.003763914108276367,
0.005803247448056936,
0.02240801602602005,
-0.04292277991771698,
-0.030880019068717957,
-0.04229488968849182,
-0.11043515056371689,
-0.019127007573843002,
0.0998392403125763,
0.044826310127973557,
-0.08422447741031647,
-0.16584274172782898,
-0.023362385109066963,
0.022905457764863968,
-0.002130646724253893,
-0.032359763979911804,
-0.01301488559693098,
0.012029945850372314,
0.07711847126483917,
-0.0764504000544548,
-0.09444417804479599,
-0.011778919026255608,
-0.04852177947759628,
0.027023758739233017,
0.01882139965891838,
0.0008939510444179177,
-0.029262488707900047,
0.1074155867099762,
0.027527544647455215,
-0.03924541920423508,
0.015019772574305534,
-0.057755760848522186,
-0.1901523321866989,
-0.0608258917927742,
0.04491028934717178,
-0.09908884763717651,
-0.009038809686899185,
0.1574743390083313,
-0.005926700308918953,
0.10443489998579025,
-0.10270686447620392,
0.0038867334369570017,
0.0748414546251297,
0.17470411956310272,
-0.07476390898227692,
0.0008782374206930399,
-0.024465661495923996,
-0.00014164205640554428,
-0.017298251390457153,
-0.0041824993677437305,
0.005331419408321381,
-0.04506303742527962,
0.04992600530385971,
0.05772015079855919,
0.014856833033263683,
0.0849032998085022,
-0.08116015791893005,
-0.0717238038778305,
0.23715490102767944,
-0.1270635426044464,
0.010537880472838879,
0.04878447204828262,
-0.05324834585189819,
0.05037437379360199,
0.03455346077680588,
0.017507510259747505,
-0.05113533139228821,
0.11462969332933426,
-0.07042153179645538,
0.01553238183259964,
-0.0413958802819252,
-0.09365616738796234,
0.013025970198214054,
-0.05215635150671005,
-0.11146043986082077,
-0.037654031068086624,
-0.12132488191127777,
-0.05133986473083496,
-0.021737385541200638,
-0.06433595716953278,
-0.009008671157062054,
0.0007055888418108225,
0.04722254350781441,
-0.004333374556154013,
0.020501498132944107,
-0.0391070656478405,
0.002838736865669489,
0.01619362272322178,
-0.05198484659194946,
0.04753902181982994,
0.04466985911130905,
0.031547173857688904,
-0.08852800726890564,
0.031363412737846375,
-0.3236677050590515,
0.16438940167427063,
-0.09614582359790802,
0.032385922968387604,
-0.14412203431129456,
-0.011557240039110184,
-0.005978289060294628,
-0.014324476942420006,
-0.05611105263233185,
0.10351833701133728,
-0.06906132400035858,
-0.02420205995440483,
0.14933642745018005,
-0.10876873135566711,
-0.009115634486079216,
0.12246160209178925,
-0.0430954247713089,
0.03251740336418152,
0.10153177380561829,
0.14821872115135193,
0.19635838270187378,
-0.037003763020038605,
-0.02287820354104042,
-0.030598772689700127,
-0.0710560753941536,
0.15765734016895294,
0.09891527891159058,
-0.06809990108013153,
0.01446585077792406,
0.006071170326322317,
-0.034914642572402954,
-0.017270775511860847,
0.007499744184315205,
-0.02114289440214634,
0.020949022844433784,
-0.0012312212493270636,
0.05308331549167633,
-0.04160875082015991,
0.015807153657078743,
0.00634687440469861,
-0.0476948618888855,
0.07407446205615997,
0.0898502841591835,
-0.03323124721646309,
0.0019435887224972248,
-0.13602593541145325,
-0.00807185284793377,
-0.009734151884913445,
-0.02424290031194687,
-0.2081926167011261,
-0.07987970113754272,
0.07666219770908356,
-0.06911231577396393,
0.15492355823516846,
-0.002549821976572275,
-0.007513549644500017,
-0.006827423349022865,
-0.05080334097146988,
-0.022096987813711166,
0.004827793687582016,
-0.0353337898850441,
-0.022221706807613373,
-0.09052348881959915,
-0.015708407387137413,
-0.020779866725206375,
0.14480754733085632,
-0.12660345435142517,
0.03884311392903328,
0.11795450747013092,
0.1095307320356369,
0.026862945407629013,
-0.021656114608049393,
0.08298829197883606,
-0.0016473974101245403,
0.0409492552280426,
-0.04341200366616249,
0.003328366670757532,
-0.014983853325247765,
-0.06727102398872375,
0.07547643780708313,
-0.1147102415561676,
-0.1293344795703888,
0.02699178084731102,
0.054285112768411636,
-0.11452968418598175,
0.012222286313772202,
-0.024229399859905243,
-0.024750106036663055,
-0.13582777976989746,
-0.020050978288054466,
0.17032396793365479,
0.03483889624476433,
0.1013629361987114,
-0.056836653500795364,
-0.033389586955308914,
-0.03609532117843628,
-0.003918138332664967,
-0.008076705038547516,
0.09465635567903519,
-0.007052459754049778,
-0.3077133893966675,
0.08104613423347473,
-0.024288009852170944,
0.06733232736587524,
0.14134818315505981,
0.006640493404120207,
-0.028854835778474808,
-0.030463602393865585,
0.06956782937049866,
-0.012582814320921898,
-0.027054086327552795,
0.005147297866642475,
0.015334230847656727,
0.03212811052799225,
0.027958020567893982,
-0.00041454052552580833,
-0.07004862278699875,
0.05640828236937523,
0.0010353176621720195,
-0.034662727266550064,
-0.04200971871614456,
0.04473579674959183,
0.07870033383369446,
0.08980605751276016,
0.03773112595081329,
0.12498646974563599,
-0.0200344305485487,
-0.023148979991674423,
-0.1406901776790619,
0.12929169833660126,
-0.10426829010248184,
-0.27920234203338623,
-0.15279068052768707,
0.043128080666065216,
-0.03946425020694733,
-0.03131259232759476,
-0.032944418489933014,
-0.0455622524023056,
-0.06995812058448792,
-0.071708545088768,
0.09889392554759979,
-0.028463471680879593,
-0.06430642306804657,
-0.04935003072023392,
0.014841414988040924,
0.007592556998133659,
-0.08438184857368469,
0.01262027770280838,
0.02305019646883011,
-0.10474222153425217,
0.0038398862816393375,
-0.03706410899758339,
0.007165149785578251,
0.12961891293525696,
-0.03619888052344322,
-0.014119727537035942,
-0.03751939535140991,
0.17876383662223816,
-0.0937693789601326,
0.11404634267091751,
0.09233230352401733,
-0.050724096596241,
0.07442725449800491,
0.12888090312480927,
0.036956898868083954,
-0.004465087782591581,
0.07585791498422623,
0.10011190176010132,
0.0065002525225281715,
-0.28188490867614746,
-0.08369845896959305,
-0.012971178628504276,
0.026728149503469467,
0.0673273429274559,
0.056576188653707504,
0.10269518196582794,
0.03744497895240784,
-0.08917666971683502,
0.03288272023200989,
0.10855486989021301,
0.061904408037662506,
0.08644300699234009,
0.04059366136789322,
0.061736710369586945,
-0.1313871443271637,
-0.02979966253042221,
0.09627837687730789,
0.04009658098220825,
0.1396995335817337,
0.06065679341554642,
0.11308306455612183,
0.07323484122753143,
0.03165623918175697,
0.08379256725311279,
0.02224261686205864,
-0.0342571921646595,
0.04834902286529541,
-0.028632743284106255,
-0.03189070522785187,
0.024926267564296722,
0.016770770773291588,
0.13973142206668854,
-0.04794509708881378,
0.03624477982521057,
-0.08128812909126282,
0.09844693541526794,
0.19634707272052765,
-0.011747813783586025,
-0.09469066560268402,
-0.04098743945360184,
0.07467589527368546,
-0.06323767453432083,
-0.05318721383810043,
-0.029918238520622253,
0.030445832759141922,
-0.17786777019500732,
0.14579689502716064,
-0.024201925843954086,
0.10958010703325272,
-0.13717281818389893,
-0.035373054444789886,
-0.0241130031645298,
-0.010862979106605053,
-0.02181519940495491,
0.07795880734920502,
-0.1288963109254837,
0.09763277322053909,
0.04387153685092926,
0.00920160673558712,
-0.04182114452123642,
0.027426425367593765,
0.0200069397687912,
0.07889215648174286,
0.11048389971256256,
-0.0036702128127217293,
-0.0015542283654212952,
-0.06612223386764526,
-0.06532818078994751,
-0.034914009273052216,
0.08136767148971558,
-0.1268678903579712,
0.05448802560567856,
-0.025509905070066452,
-0.013528721407055855,
-0.05643288791179657,
-0.09406625479459763,
-0.16481974720954895,
-0.13965201377868652,
0.02751508727669716,
-0.06840917468070984,
-0.0034543736837804317,
0.0030596773140132427,
-0.01976904831826687,
-0.0818413496017456,
0.1249743402004242,
-0.15900176763534546,
-0.07089507579803467,
-0.13157764077186584,
-0.0014820992946624756,
0.10856375098228455,
-0.0761210098862648,
0.021984096616506577,
-0.060117848217487335,
0.1031988263130188,
-0.02096688374876976,
-0.0606500580906868,
0.021548965945839882,
-0.010961911641061306,
-0.14850392937660217,
0.0077043273486196995,
0.09563843160867691,
0.10463806986808777,
0.03700710088014603,
-0.0046966238878667355,
0.019546568393707275,
0.05936986953020096,
-0.1311052292585373,
-0.04458041861653328,
0.11745532602071762,
0.016840536147356033,
0.12819904088974,
-0.060367703437805176,
-0.25395405292510986,
-0.13568304479122162,
-0.01777130365371704,
0.08098937571048737,
0.11289667338132858,
-0.055492691695690155,
0.10772251337766647,
0.098560631275177,
-0.079297736287117,
-0.1623411476612091,
0.009085077792406082,
0.02215508744120598,
0.015295060351490974,
0.073051318526268,
-0.1624167263507843,
0.05765588954091072,
0.06749194115400314,
-0.014637162908911705,
-0.11731278896331787,
-0.2174331247806549,
-0.13434350490570068,
0.03533793240785599,
0.016753334552049637,
-0.15832841396331787,
-0.11680735647678375,
-0.08321522176265717,
-0.029642948880791664,
-0.03821524232625961,
0.19462242722511292,
-0.0014827311970293522,
0.03428282588720322,
0.03371219336986542,
0.025961266830563545,
0.01325199380517006,
-0.002150032902136445,
0.109063059091568,
0.03834844380617142,
0.05342753231525421,
-0.035445839166641235,
-0.07138655334711075,
0.12858174741268158,
-0.01542515680193901,
0.12597936391830444,
0.055994316935539246,
0.038636304438114166,
-0.16748473048210144,
-0.053050003945827484,
-0.06480912864208221,
-0.0011283045168966055,
-0.037971850484609604,
-0.01099617499858141,
-0.12432374060153961,
0.09641510248184204,
0.03370976820588112,
-0.01877514086663723,
0.09106160700321198,
-0.08050109446048737,
0.02004421129822731,
0.014347795397043228,
0.14430737495422363,
0.17924635112285614,
-0.0992070883512497,
-0.08502452075481415,
-0.016917621716856956,
0.02281748503446579,
-0.1732606589794159,
0.06491133570671082,
0.07913202792406082,
0.028526870533823967,
0.18331363797187805,
-0.018840115517377853,
-0.12986062467098236,
0.03952842950820923,
0.07222597301006317,
-0.08986598253250122,
-0.15433961153030396,
0.018829578533768654,
0.0521874725818634,
-0.047790247946977615,
0.005415741354227066,
0.08156298846006393,
-0.012995327822864056,
-0.07716408371925354,
0.048384927213191986,
0.04919768497347832,
-0.021251946687698364,
0.06813248991966248,
0.009044833481311798,
0.06586453318595886,
-0.06236102804541588,
0.1614181399345398,
0.19934958219528198,
-0.08286774903535843,
-0.04704482480883598,
0.15388113260269165,
-0.12184440344572067,
-0.04715634882450104,
-0.0859486311674118,
0.08228884637355804,
0.015282859094440937,
-0.04700455442070961,
0.027128778398036957,
-0.09086283296346664,
0.022753462195396423,
0.20698505640029907,
0.020173892378807068,
0.06180013716220856,
-0.04495462775230408,
-0.023734431713819504,
-0.018475381657481194,
0.05250350385904312,
0.03309880197048187,
-0.0023860980290919542,
-0.10668708384037018,
0.04549671337008476,
0.09568202495574951,
0.014112618751823902,
-0.01169673353433609,
-0.0666474923491478,
-0.0807800143957138,
-0.004449353087693453,
0.019153360277414322,
0.04450313001871109,
-0.07215776294469833,
0.006988672539591789,
-0.014935387298464775,
-0.043986253440380096,
0.0013026043307036161,
-0.016660425812005997,
-0.03773554787039757,
-0.03153073415160179,
-0.03440765291452408,
0.1443227380514145,
-0.17059803009033203,
-0.005321213509887457,
0.09769804775714874,
-0.07029424607753754,
0.08735984563827515,
0.011817685328423977,
-0.02325662225484848,
0.03159289062023163,
-0.09571714699268341,
0.014828660525381565,
0.007642854005098343,
0.019256742671132088,
-0.01022128202021122,
-0.13833174109458923,
0.006688418332487345,
-0.051329098641872406,
0.02475161850452423,
0.029811060056090355,
0.059023529291152954,
-0.0847359150648117,
0.049501266330480576,
-0.047587521374225616,
-0.06432979553937912,
-0.07580561935901642,
0.04744938015937805,
0.09463614970445633,
0.021250709891319275,
0.05855879187583923,
-0.04838234558701515,
0.03771131485700607,
-0.1304643750190735,
-0.01609749160706997,
-0.007560041733086109,
-0.008620299398899078,
-0.0651962086558342,
-0.028035301715135574,
0.02862391620874405,
-0.00999495666474104,
0.08904294669628143,
-0.021853581070899963,
0.01191114354878664,
0.06629904359579086,
0.07432146370410919,
-0.1337519735097885,
0.05006410926580429,
0.03289653733372688,
-0.00395713746547699,
-0.013072660192847252,
0.060116201639175415,
-0.06539535522460938,
-0.10877758264541626,
0.008027574047446251,
0.089458167552948,
0.1687035858631134,
-0.06092141568660736,
-0.024145927280187607,
0.0705537274479866,
-0.022019056603312492,
-0.06269292533397675,
0.034245751798152924,
-0.10350248217582703,
-0.030931085348129272,
-0.0833781361579895,
0.06005540490150452,
0.1357542872428894,
-0.1260715126991272,
0.10157063603401184,
0.03408299386501312,
-0.07960173487663269,
-0.06915272772312164,
-0.17642560601234436,
-0.06644006073474884,
-0.0008633539546281099,
-0.010697675868868828,
-0.08486992120742798,
0.05866195261478424,
0.1287316381931305,
0.05402609333395958,
-0.018291592597961426,
0.14164596796035767,
-0.1520344316959381,
-0.0865996778011322,
0.0904596745967865,
0.03633096069097519,
0.015625204890966415,
0.020855603739619255,
0.06752213835716248,
0.013954663649201393,
0.11310189217329025,
0.06558938324451447,
0.037538327276706696,
0.027702100574970245,
0.0085699912160635,
-0.06872031092643738,
-0.06437043845653534,
0.023118363693356514,
-0.029701517894864082,
-0.03251795843243599,
0.12867286801338196,
0.040760286152362823,
-0.013517741113901138,
-0.026834575459361076,
0.1867782324552536,
-0.02007957547903061,
-0.05352398008108139,
-0.18020246922969818,
0.18508034944534302,
-0.0034153545275330544,
0.0010386888170614839,
0.05990565940737724,
-0.11107519268989563,
-0.019634995609521866,
0.13689856231212616,
0.12031970918178558,
0.034317389130592346,
0.018598943948745728,
-0.016025356948375702,
0.019605115056037903,
0.04956316202878952,
0.07463814318180084,
-0.047713909298181534,
0.1287991851568222,
-0.03976263105869293,
0.11481860280036926,
-0.03644251078367233,
-0.049970656633377075,
0.009227240458130836,
0.07429122924804688,
-0.03150572255253792,
0.03874475508928299,
-0.06549790501594543,
0.1595064103603363,
-0.10024707764387131,
-0.19462719559669495,
0.033005110919475555,
-0.07092368602752686,
-0.1075330376625061,
-0.009745705872774124,
-0.03410053253173828,
-0.016118282452225685,
0.015354711562395096,
0.04012908414006233,
-0.010188618674874306,
0.06288966536521912,
0.0593702495098114,
-0.06419707089662552,
-0.06346817314624786,
0.07435794174671173,
-0.0023311153054237366,
0.14725404977798462,
0.022759031504392624,
0.06525751203298569,
0.07639878988265991,
-0.03934235870838165,
-0.07552587240934372,
0.05163107067346573,
0.013011828064918518,
0.046925563365221024,
-0.016644088551402092,
0.15650007128715515,
0.036114126443862915,
0.1068665087223053,
0.06354646384716034,
-0.13619333505630493,
0.06088360399007797,
-0.06298170238733292,
0.006446499843150377,
-0.14458143711090088,
0.10535690188407898,
-0.0986173152923584,
0.11299620568752289,
0.1476270854473114,
-0.011304743587970734,
0.01551720593124628,
-0.01666768454015255,
-0.001848581712692976,
-0.017904585227370262,
0.10122039914131165,
-0.030801894143223763,
-0.1395294964313507,
0.032381802797317505,
-0.09546112269163132,
0.08701074868440628,
-0.193209707736969,
-0.015743644908070564,
-0.0023997267708182335,
0.0006125345826148987,
-0.027833685278892517,
0.1275157332420349,
-0.027888108044862747,
0.018409285694360733,
-0.004910774528980255,
-0.27846506237983704,
0.017015686258673668,
0.09660661220550537,
-0.09771013259887695,
0.001264525461010635
] |
null | null |
transformers
|
# Michael Scott DialoGPT Model
|
{"tags": ["conversational"]}
|
text-generation
|
aishanisingh/DiagloGPT-small-michaelscott
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Michael Scott DialoGPT Model
|
[
"# Michael Scott DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Michael Scott DialoGPT Model"
] |
[
51,
8
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Michael Scott DialoGPT Model"
] |
[
-0.05219185724854469,
0.09866782277822495,
-0.005691746715456247,
0.014186694286763668,
0.1394561529159546,
-0.001829843153245747,
0.16353429853916168,
0.11410007625818253,
0.0003006179176736623,
-0.04741425812244415,
0.1353054791688919,
0.15719813108444214,
-0.014070987701416016,
0.08814262598752975,
-0.06975510716438293,
-0.2998508810997009,
0.039975605905056,
0.049550775438547134,
0.0006589151453226805,
0.12373805791139603,
0.09086789190769196,
-0.04772906005382538,
0.07793775945901871,
0.010827907361090183,
-0.15130233764648438,
0.00034275747020728886,
0.021106015890836716,
-0.10742229968309402,
0.11193658411502838,
0.057059239596128464,
0.011923723854124546,
0.05351797118782997,
-0.046079181134700775,
-0.12970955669879913,
0.037044789642095566,
-0.025121131911873817,
-0.03236977010965347,
0.04242695868015289,
0.025312280282378197,
-0.09374593198299408,
0.12003415822982788,
0.1303921341896057,
0.001079527661204338,
0.04427899792790413,
-0.16276776790618896,
0.016268204897642136,
-0.0005779521889053285,
0.04450548440217972,
0.08583608269691467,
0.12227486819028854,
-0.04295159503817558,
0.12185361236333847,
-0.0555940717458725,
0.11034291237592697,
0.06189177185297012,
-0.3160879909992218,
-0.01910942979156971,
0.10594988614320755,
0.01992202363908291,
0.05502206087112427,
-0.029418546706438065,
0.08084716647863388,
0.013392501510679722,
0.0037920791655778885,
-0.00698307016864419,
-0.07697310298681259,
-0.0839802697300911,
0.02420506812632084,
-0.09330907464027405,
-0.009450321085751057,
0.2700197696685791,
-0.02744600921869278,
0.072395920753479,
-0.08372221887111664,
-0.08785450458526611,
-0.022152839228510857,
-0.028888678178191185,
-0.0352373942732811,
-0.07969874143600464,
0.0692928358912468,
-0.02994782291352749,
-0.0891190692782402,
-0.12064703553915024,
-0.015572070144116879,
-0.1808616667985916,
0.1355033814907074,
0.02015385404229164,
0.04424908012151718,
-0.2066064178943634,
0.10033243149518967,
-0.0034225129056721926,
-0.09585415571928024,
0.025406567379832268,
-0.08791401237249374,
0.023702410981059074,
0.006199836265295744,
-0.03435196727514267,
-0.03682130575180054,
0.052537158131599426,
0.1017875000834465,
0.037624917924404144,
0.010535156354308128,
-0.0018888848135247827,
0.03797334060072899,
0.051690686494112015,
0.09864642471075058,
-0.012159604579210281,
-0.09275422245264053,
0.02472686767578125,
-0.07794763147830963,
-0.005519283004105091,
-0.058316051959991455,
-0.18736064434051514,
-0.015742581337690353,
0.0638531967997551,
0.044187113642692566,
0.038800206035375595,
0.12312106788158417,
-0.010700120590627193,
-0.04949404299259186,
0.030500246211886406,
-0.011449377983808517,
-0.04102412611246109,
0.0034441438037902117,
-0.006849803030490875,
0.12526994943618774,
0.026637904345989227,
0.04344993084669113,
-0.11248063296079636,
0.028229007497429848,
-0.057886455208063126,
-0.01740637980401516,
-0.009245701134204865,
-0.03968518599867821,
-0.012927724979817867,
-0.013261554762721062,
0.022976692765951157,
-0.14569568634033203,
-0.1517621874809265,
-0.01016512792557478,
-0.02071678265929222,
-0.03244677186012268,
-0.1160486489534378,
-0.10807818174362183,
-0.01673271879553795,
0.019136225804686546,
-0.06594734638929367,
-0.02844683639705181,
-0.0698276236653328,
0.08516715466976166,
-0.018525948747992516,
0.08600673824548721,
-0.10450328141450882,
0.08221111446619034,
-0.08476737141609192,
-0.03818882256746292,
-0.09571085125207901,
0.13696692883968353,
0.013556249439716339,
0.0781843289732933,
-0.018449140712618828,
-0.02062518522143364,
-0.0882476195693016,
0.059506144374608994,
-0.04792816564440727,
0.2627614736557007,
-0.05373995751142502,
-0.12228331714868546,
0.25644850730895996,
-0.04264277219772339,
-0.1323377937078476,
0.12074649333953857,
-0.011831019073724747,
0.10573146492242813,
0.1418948471546173,
0.19843782484531403,
0.02438964881002903,
-0.015437912195920944,
0.08063486218452454,
0.11468730866909027,
-0.07269655913114548,
-0.025499138981103897,
0.021268967539072037,
-0.013782723806798458,
-0.08294597268104553,
0.04756487160921097,
0.0545177198946476,
0.06714420020580292,
-0.061176858842372894,
-0.018936485052108765,
0.008749358355998993,
-0.012960782274603844,
0.07812909036874771,
-0.03309756517410278,
0.13732872903347015,
-0.023552410304546356,
-0.04682721197605133,
0.022938000038266182,
0.0061576031148433685,
-0.03251919895410538,
0.029665304347872734,
-0.0829462856054306,
0.06871046125888824,
-0.03380677103996277,
0.046204015612602234,
-0.1479959934949875,
-0.06659089773893356,
-0.0554286427795887,
0.19006399810314178,
0.0666101947426796,
0.11865320801734924,
0.050943177193403244,
-0.050719913095235825,
-0.022334067150950432,
0.026607545092701912,
0.1764538288116455,
-0.00432533398270607,
-0.08827561140060425,
-0.0893583744764328,
0.10425122827291489,
-0.050604887306690216,
0.14028102159500122,
-0.04839342460036278,
0.02007957734167576,
0.0026882649399340153,
0.08945371210575104,
-0.01477344986051321,
0.026225006207823753,
0.02395329251885414,
-0.023401744663715363,
-0.03615526854991913,
0.002531831618398428,
0.09866794943809509,
0.01095353439450264,
-0.10495016723871231,
0.21652550995349884,
-0.17865079641342163,
0.1712048351764679,
0.1963411271572113,
-0.23899659514427185,
-0.0015646020183339715,
-0.11911866068840027,
-0.028263965621590614,
0.001987667288631201,
0.06021636724472046,
-0.0393366664648056,
0.22000858187675476,
-0.016854142770171165,
0.17416520416736603,
-0.02770877256989479,
-0.040796905755996704,
-0.037516918033361435,
-0.03281911462545395,
0.006335647311061621,
0.10647379606962204,
0.11007938534021378,
-0.15187709033489227,
0.16408798098564148,
0.11575789749622345,
0.0780109092593193,
0.17331485450267792,
0.034749943763017654,
-0.0032106752041727304,
0.05403655767440796,
-0.016047578305006027,
-0.058072831481695175,
-0.054366856813430786,
-0.2821202278137207,
-0.022700391709804535,
0.06727944314479828,
0.03511533513665199,
0.1157771497964859,
-0.09771660715341568,
-0.03510351851582527,
0.007726206444203854,
-0.004056483972817659,
-0.011832889169454575,
0.11260128766298294,
0.024472780525684357,
0.11891433596611023,
-0.014555193483829498,
-0.049464691430330276,
0.06548446416854858,
0.015799861401319504,
-0.0911889299750328,
0.1808817833662033,
-0.1293938159942627,
-0.31928402185440063,
-0.11005263775587082,
-0.1739242821931839,
-0.06692400574684143,
0.044659167528152466,
0.089129239320755,
-0.0986839309334755,
-0.012885700911283493,
-0.010432631708681583,
0.10299661755561829,
-0.1042117103934288,
-0.00021655845921486616,
-0.024086903780698776,
-0.008923182263970375,
-0.12579397857189178,
-0.09363239258527756,
-0.05231120064854622,
-0.047037553042173386,
-0.056600943207740784,
0.12313047051429749,
-0.16023315489292145,
0.016609620302915573,
0.23161454498767853,
0.0652986690402031,
0.057193268090486526,
-0.03880190849304199,
0.2495022416114807,
-0.10808973759412766,
-0.0023339898325502872,
0.18616418540477753,
-0.04433102160692215,
0.05136839672923088,
0.11985351145267487,
-0.0136415995657444,
-0.06612838804721832,
0.02638145536184311,
-0.026510832831263542,
-0.06462612748146057,
-0.20766602456569672,
-0.12633801996707916,
-0.10652592033147812,
0.09661045670509338,
0.014828304760158062,
0.031906623393297195,
0.13614897429943085,
0.06607432663440704,
-0.028223717585206032,
-0.021433252841234207,
0.051379118114709854,
0.0797564685344696,
0.2964741587638855,
-0.08455753326416016,
0.1417170912027359,
-0.013198381289839745,
-0.15605655312538147,
0.07814671099185944,
0.044270530343055725,
0.0715622529387474,
0.06255177408456802,
0.05851732939481735,
-0.0010256161913275719,
0.04383862763643265,
0.10823048651218414,
0.06493564695119858,
0.03111851029098034,
-0.03486765921115875,
-0.04263158515095711,
-0.04394565895199776,
-0.024241000413894653,
0.04993441700935364,
0.07002178579568863,
-0.1495700478553772,
-0.028503501787781715,
-0.01351531594991684,
0.05873153731226921,
0.051203496754169464,
0.10058362036943436,
-0.1801169514656067,
-0.0331742949783802,
0.06462137401103973,
-0.03960053250193596,
-0.1126687154173851,
0.08458040654659271,
0.020008135586977005,
-0.12716439366340637,
0.05063258856534958,
-0.005617031827569008,
0.11739388853311539,
-0.07477667927742004,
0.07746375352144241,
-0.11893178522586823,
-0.09835978597402573,
-0.0009378000977449119,
0.09371139109134674,
-0.25335580110549927,
0.20636585354804993,
-0.0014321436174213886,
-0.06408964097499847,
-0.09884043782949448,
-0.01885804533958435,
0.02359641157090664,
0.13839443027973175,
0.1132744550704956,
-0.009273536503314972,
0.030854539945721626,
0.005529241636395454,
-0.08657826483249664,
0.026634985581040382,
0.0939711406826973,
-0.053410161286592484,
-0.007721978239715099,
-0.03209467604756355,
-0.005074217449873686,
-0.00485841603949666,
-0.08350344747304916,
0.02240070514380932,
-0.18551349639892578,
0.09220478683710098,
0.0513395220041275,
0.0812494307756424,
0.042932961136102676,
-0.03902555629611015,
-0.08352344483137131,
0.2238694131374359,
-0.017721988260746002,
-0.10115890949964523,
-0.09303940832614899,
-0.01734204962849617,
0.04749097675085068,
-0.060149773955345154,
0.01021429430693388,
-0.04765286669135094,
0.014978951774537563,
-0.05359692499041557,
-0.18430352210998535,
0.11967626214027405,
-0.08449894189834595,
-0.035681381821632385,
-0.027693649753928185,
0.23086680471897125,
-0.024970047175884247,
0.018091564998030663,
0.034332241863012314,
0.0026490986347198486,
-0.11853642761707306,
-0.09563998878002167,
0.012357753701508045,
0.003760053077712655,
-0.0007214160868898034,
0.034301016479730606,
-0.016425905749201775,
-0.06487460434436798,
-0.05784493684768677,
-0.01362778339534998,
0.3071885406970978,
0.13983532786369324,
-0.04023562744259834,
0.15182597935199738,
0.11129388213157654,
-0.059501636773347855,
-0.269977331161499,
-0.05761236697435379,
-0.09105021506547928,
-0.04330240562558174,
-0.03144540265202522,
-0.1561248004436493,
0.1035873144865036,
-0.043056048452854156,
-0.00868955347687006,
0.1129111498594284,
-0.2777594029903412,
-0.1097259521484375,
0.17846046388149261,
-0.030682211741805077,
0.4436364769935608,
-0.09721767902374268,
-0.07306288927793503,
-0.04873170331120491,
-0.2055576592683792,
0.13611987233161926,
0.011453157290816307,
0.1054515689611435,
-0.0006767508457414806,
0.18992725014686584,
0.05275087431073189,
-0.0010212045162916183,
0.07581665366888046,
0.018801437690854073,
-0.06132015958428383,
-0.08938997983932495,
-0.10635406523942947,
0.005542214959859848,
0.02234536036849022,
0.015152977779507637,
-0.0446556881070137,
0.03197968378663063,
-0.13170892000198364,
-0.0657021701335907,
-0.08875783532857895,
0.03514505550265312,
0.02016925998032093,
-0.06620988994836807,
0.01154492050409317,
-0.040027666836977005,
-0.003320010844618082,
-0.009548322297632694,
0.10755529999732971,
-0.12642329931259155,
0.12724217772483826,
0.07259592413902283,
0.14272688329219818,
-0.13762855529785156,
-0.02596178464591503,
-0.05356896296143532,
-0.06463674455881119,
0.06073172017931938,
-0.09363619238138199,
0.02392594702541828,
0.1137228012084961,
-0.02330799214541912,
0.07304049283266068,
0.09772489964962006,
0.006532188504934311,
0.011307611130177975,
0.08549375832080841,
-0.2518516778945923,
-0.09829720854759216,
-0.08842265605926514,
-0.006847918499261141,
0.07912944257259369,
0.10351879149675369,
0.21049904823303223,
-0.011108608916401863,
-0.028110411018133163,
0.015511404722929,
0.012660179287195206,
-0.02552294172346592,
0.0629696473479271,
-0.013685106299817562,
0.011273954063653946,
-0.142191544175148,
0.04728856310248375,
-0.010115091688930988,
-0.10728852450847626,
0.0070778122171759605,
0.11895912885665894,
-0.11085338890552521,
-0.1132420152425766,
-0.08729799091815948,
0.11374850571155548,
-0.11740808933973312,
0.01583920791745186,
-0.04273713007569313,
-0.14329342544078827,
0.06087562441825867,
0.12644949555397034,
0.05289270728826523,
0.054770201444625854,
-0.09863127022981644,
-0.017707131803035736,
-0.01886041834950447,
0.008445576764643192,
0.045651089400053024,
-0.029413091018795967,
-0.04314618930220604,
0.08987950533628464,
-0.0398998036980629,
0.12349136173725128,
-0.08804872632026672,
-0.10643022507429123,
-0.13970406353473663,
0.036211591213941574,
-0.09100706875324249,
-0.1015876904129982,
-0.09673352539539337,
-0.0404139868915081,
-0.015968140214681625,
-0.033703241497278214,
-0.04924946650862694,
-0.04616044834256172,
-0.11585718393325806,
0.04197810962796211,
-0.035671547055244446,
0.002554558916017413,
-0.06397118419408798,
0.03164825960993767,
0.057608336210250854,
-0.029087256640195847,
0.16798870265483856,
0.14900587499141693,
-0.113084577023983,
0.08678457140922546,
-0.10599664598703384,
-0.08436369150876999,
0.09296881407499313,
0.015673324465751648,
0.050969187170267105,
0.05570908635854721,
0.0008179193246178329,
0.05604454129934311,
0.06767231225967407,
0.05749130994081497,
0.05125901848077774,
-0.0781613439321518,
0.026795929297804832,
-0.0353478379547596,
-0.10971686244010925,
-0.05053088814020157,
-0.032386112958192825,
0.0321459099650383,
0.03835080564022064,
0.10419028252363205,
-0.05157046392560005,
0.07967690378427505,
-0.06761150062084198,
0.04166098311543465,
0.02458209916949272,
-0.17817717790603638,
-0.011017074808478355,
-0.08725284785032272,
0.062094446271657944,
0.010369001887738705,
0.22922518849372864,
0.0010608482407405972,
-0.016776392236351967,
0.04306100308895111,
0.09107155352830887,
0.039415836334228516,
-0.008862773887813091,
0.17689786851406097,
0.11348211020231247,
-0.04523318260908127,
-0.08200377225875854,
0.09221187978982925,
0.028359251096844673,
0.0636616051197052,
0.1533689647912979,
-0.009847167879343033,
-0.029553398489952087,
0.08961945027112961,
-0.00820152461528778,
0.04360003024339676,
-0.11889488995075226,
-0.17109516263008118,
-0.035233404487371445,
0.07828069478273392,
-0.054776258766651154,
0.125512033700943,
0.12796565890312195,
-0.02143034152686596,
0.02544325962662697,
-0.02652924321591854,
-0.06300850212574005,
-0.18642783164978027,
-0.15833324193954468,
-0.07247349619865417,
-0.1361551433801651,
0.003649224527180195,
-0.12954506278038025,
0.034160468727350235,
0.03228195384144783,
0.09627309441566467,
-0.07396618276834488,
0.07815251499414444,
0.009733840823173523,
-0.11123551428318024,
0.09365225583314896,
-0.03206063434481621,
0.08423103392124176,
-0.05801235884428024,
0.008146187290549278,
-0.07326289266347885,
0.061165083199739456,
0.005733943078666925,
0.024489792063832283,
-0.06499718874692917,
0.0006845087045803666,
-0.12899963557720184,
-0.08145640790462494,
-0.07403001189231873,
0.07683531194925308,
-0.00021575485880021006,
0.14871475100517273,
0.0007607350125908852,
-0.024332456290721893,
0.022351602092385292,
0.2547334134578705,
-0.0823284238576889,
-0.1065358966588974,
-0.0760388895869255,
0.1612902730703354,
-0.01100313849747181,
0.09074048697948456,
-0.027294078841805458,
0.005724847782403231,
-0.07292640954256058,
0.3486209213733673,
0.31385937333106995,
-0.12144593149423599,
0.008552278392016888,
0.004036948550492525,
0.042547158896923065,
0.12109120190143585,
0.08933079242706299,
0.08928026258945465,
0.29122135043144226,
-0.06460609287023544,
-0.029492847621440887,
-0.01038265135139227,
-0.028137030079960823,
-0.03290077671408653,
0.061318300664424896,
0.06818705052137375,
-0.06311694532632828,
-0.037182971835136414,
0.11344512552022934,
-0.24985961616039276,
0.07765885442495346,
-0.16423965990543365,
-0.19129236042499542,
-0.0909482091665268,
-0.005408334545791149,
0.09333501011133194,
0.025192279368638992,
0.08434837311506271,
-0.004080671351402998,
-0.05664246529340744,
0.06709237396717072,
0.018763625994324684,
-0.2031526118516922,
-0.014244482852518559,
0.09505189210176468,
-0.03442936763167381,
-0.05224468931555748,
-0.013855420984327793,
0.06696508079767227,
0.06307736784219742,
0.05825239419937134,
-0.015055189840495586,
0.04039718955755234,
-0.0020734043791890144,
-0.07968246936798096,
0.021742230281233788,
0.029902489855885506,
0.01116474624723196,
-0.06253565847873688,
0.07943971455097198,
-0.1343110054731369,
0.053404469043016434,
-0.023831099271774292,
-0.06456206738948822,
-0.028253937140107155,
0.022574080154299736,
-0.06423580646514893,
0.0817347913980484,
0.10829576104879379,
-0.021677108481526375,
-0.018034711480140686,
-0.018499786034226418,
-0.023955125361680984,
-0.022458717226982117,
-0.06042034551501274,
-0.09595854580402374,
-0.16020077466964722,
-0.1087728962302208,
0.06417213380336761,
-0.0026469272561371326,
-0.2069835513830185,
0.0078546442091465,
-0.12910261750221252,
0.05230732262134552,
-0.10699926316738129,
0.12214474380016327,
0.07983854413032532,
0.01550530269742012,
0.0022410741075873375,
0.00903013814240694,
0.0417785719037056,
0.09253215044736862,
-0.1290154904127121,
-0.08394894748926163
] |
null | null |
transformers
|
# Harry Potter DialoGPT Model
|
{"tags": ["conversational"]}
|
text-generation
|
aishanisingh/DialoGPT-small-harrypotter
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Harry Potter DialoGPT Model
|
[
"# Harry Potter DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Harry Potter DialoGPT Model"
] |
[
51,
8
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Harry Potter DialoGPT Model"
] |
[
-0.0009023238671943545,
0.07815738022327423,
-0.006546166725456715,
0.07792752981185913,
0.10655936598777771,
0.048972971737384796,
0.17639793455600739,
0.12185695022344589,
0.016568755730986595,
-0.04774167761206627,
0.11647630482912064,
0.2130284160375595,
-0.002118367003276944,
0.024608047679066658,
-0.05022026598453522,
-0.3065771162509918,
0.0474756620824337,
0.014356585219502449,
-0.07174845039844513,
0.11724270135164261,
0.09064973145723343,
-0.046179238706827164,
0.08330509811639786,
-0.009135239757597446,
-0.13198648393154144,
-0.039482954889535904,
0.019292812794446945,
-0.11745545268058777,
0.1662212759256363,
0.05298272892832756,
0.02469746209681034,
-0.008447164669632912,
-0.06598151475191116,
-0.15036040544509888,
0.037190426141023636,
-0.027472136542201042,
-0.01080626156181097,
0.05462246760725975,
0.023526115342974663,
-0.07521048933267593,
0.170567125082016,
0.17678891122341156,
0.0833497866988182,
0.0349111407995224,
-0.14917024970054626,
-0.045548245310783386,
0.008950977586209774,
0.05421316996216774,
-0.017893504351377487,
0.09349167346954346,
-0.019903047010302544,
0.11801653355360031,
-0.04491448402404785,
0.09210366010665894,
0.15255063772201538,
-0.4016275703907013,
-0.027563704177737236,
0.08920855820178986,
0.05989706888794899,
0.12076901644468307,
-0.10560955852270126,
0.03972794860601425,
-0.0039703017100691795,
0.01236654631793499,
-0.014540530741214752,
-0.08304883539676666,
-0.07308239489793777,
0.032504837960004807,
-0.1272556483745575,
0.008525865152478218,
0.23756256699562073,
-0.10643257945775986,
0.037069112062454224,
-0.09791990369558334,
-0.07414398342370987,
0.048336777836084366,
-0.053761593997478485,
-0.081727035343647,
-0.054839808493852615,
0.06347949057817459,
0.004366500303149223,
-0.06301609426736832,
-0.08326146006584167,
-0.0006536149303428829,
-0.12781435251235962,
0.17595994472503662,
0.061243366450071335,
0.041611745953559875,
-0.21322020888328552,
0.08940251916646957,
0.04477722570300102,
-0.04711297154426575,
0.007116159424185753,
-0.11796226352453232,
0.04023287072777748,
0.005483259446918964,
-0.03256071358919144,
-0.021854614838957787,
0.0393419973552227,
0.13909944891929626,
-0.01777748204767704,
0.03252175822854042,
0.006831915583461523,
0.05811219662427902,
0.08162496984004974,
0.02222144603729248,
0.019291909411549568,
-0.0818009302020073,
0.019385190680623055,
-0.08128736168146133,
-0.0030400939285755157,
-0.048940129578113556,
-0.17071883380413055,
-0.07477642595767975,
0.052610911428928375,
0.020047198981046677,
0.03746970370411873,
0.08054786175489426,
-0.0017944995779544115,
-0.05560554191470146,
0.03284840285778046,
0.01671096310019493,
-0.020622212439775467,
-0.010361049324274063,
-0.02412462793290615,
0.19123271107673645,
0.019619356840848923,
0.014111656695604324,
-0.12379156798124313,
0.10023640841245651,
-0.08179095387458801,
0.0037731381598860025,
0.02743307314813137,
-0.04204464703798294,
-0.004716555587947369,
0.02917117439210415,
0.023101668804883957,
-0.1252521574497223,
-0.1099385917186737,
-0.0030569476075470448,
-0.012054097838699818,
-0.036421261727809906,
-0.10490952432155609,
-0.08483029156923294,
-0.012153145857155323,
0.0449371263384819,
-0.013397793285548687,
0.007936403155326843,
-0.05143149942159653,
0.0985720232129097,
-0.0514979362487793,
0.09873400628566742,
-0.08342572301626205,
0.06359215080738068,
-0.09124887734651566,
-0.061886150389909744,
-0.11452563107013702,
0.05216052383184433,
0.012905281968414783,
0.066250741481781,
0.016998225823044777,
-0.044836658984422684,
-0.014836243353784084,
0.05253177136182785,
-0.07656687498092651,
0.1940697431564331,
-0.041674621403217316,
-0.12459053844213486,
0.24146439135074615,
-0.09138800948858261,
-0.1802034229040146,
0.12973085045814514,
-0.022254703566432,
0.08523941785097122,
0.12802475690841675,
0.20380465686321259,
-0.00019822151807602495,
-0.01302915159612894,
0.07281201332807541,
0.07031642645597458,
-0.09803894907236099,
0.06239739805459976,
0.029653839766979218,
-0.008071083575487137,
-0.08906278014183044,
0.05762826278805733,
0.046033453196287155,
-0.010650773532688618,
-0.035073768347501755,
-0.001896020956337452,
-0.012895751744508743,
-0.022185025736689568,
0.14126582443714142,
-0.02006692811846733,
0.1300428807735443,
-0.06926563382148743,
-0.03515486419200897,
-0.009500149637460709,
0.03533667325973511,
-0.04091939330101013,
0.08151165395975113,
-0.0436173714697361,
0.10586477071046829,
0.09034156054258347,
0.053724925965070724,
-0.13120363652706146,
0.00466286763548851,
-0.015246815048158169,
0.17014820873737335,
0.08964069187641144,
0.05222717300057411,
0.06265474855899811,
-0.0020888058934360743,
-0.06708643585443497,
0.045407816767692566,
0.13778303563594818,
-0.037020038813352585,
-0.12218865007162094,
-0.1755627691745758,
0.051157694309949875,
-0.045444171875715256,
0.10855234414339066,
-0.10010123997926712,
0.022670533508062363,
-0.055906031280756,
0.07772238552570343,
-0.024998966604471207,
0.020512236282229424,
-0.0013405600329861045,
-0.021700702607631683,
-0.08356887847185135,
-0.002377772703766823,
0.08597290515899658,
-0.02048647589981556,
-0.06707409024238586,
0.16556480526924133,
-0.16400809586048126,
0.1631954461336136,
0.2116095870733261,
-0.28542569279670715,
-0.005696662236005068,
-0.15163889527320862,
-0.0208092350512743,
0.019645055755972862,
0.07834604382514954,
0.026225795969367027,
0.2044338881969452,
-0.012928472831845284,
0.16565458476543427,
-0.05699567869305611,
-0.07730039209127426,
-0.06881127506494522,
-0.048101142048835754,
0.013522743247449398,
0.09095205366611481,
0.04542696103453636,
-0.11962861567735672,
0.13119758665561676,
0.1054433062672615,
0.06484298408031464,
0.12711186707019806,
0.1030748188495636,
-0.008113685995340347,
0.07252490520477295,
-0.03624548763036728,
-0.03462279960513115,
-0.09254947304725647,
-0.30446043610572815,
-0.04840317741036415,
0.0939924493432045,
0.007963384501636028,
0.09285714477300644,
-0.0919896736741066,
-0.03311870992183685,
0.006042704917490482,
0.009473444893956184,
0.028337622061371803,
0.09653715789318085,
0.013490920886397362,
0.15320514142513275,
-0.008011690340936184,
-0.03430786728858948,
0.05891305208206177,
0.017982570454478264,
-0.09147711098194122,
0.17280617356300354,
-0.17050009965896606,
-0.27190929651260376,
-0.06990014761686325,
-0.21745692193508148,
-0.013139115646481514,
0.05258983001112938,
0.0786920040845871,
-0.11818131804466248,
-0.018352627754211426,
-0.006239492911845446,
0.05685517191886902,
-0.2425733357667923,
0.0004911290016025305,
-0.1354890614748001,
0.0501418262720108,
-0.1974833607673645,
-0.09718500077724457,
-0.02271542325615883,
-0.013450481928884983,
-0.0464281290769577,
0.13365240395069122,
-0.1448695808649063,
-0.011572926305234432,
0.2329535037279129,
0.032479673624038696,
0.027794739231467247,
-0.05020907148718834,
0.19788463413715363,
-0.0958966314792633,
-0.023973820731043816,
0.11024576425552368,
-0.05038975924253464,
0.04834126681089401,
0.06649978458881378,
-0.012981836684048176,
-0.08557141572237015,
0.023789849132299423,
-0.068336620926857,
-0.03150583803653717,
-0.27926525473594666,
-0.0930178239941597,
-0.09319330751895905,
0.11305391043424606,
0.04079577326774597,
0.06421639025211334,
0.16545771062374115,
0.05191578343510628,
-0.024325082078576088,
-0.03006586618721485,
0.11609793454408646,
0.12905290722846985,
0.2277202159166336,
-0.06067761778831482,
0.10221996158361435,
0.009445492178201675,
-0.08203992247581482,
0.06062209978699684,
0.056782789528369904,
0.06324724853038788,
0.02584579586982727,
0.03694582358002663,
-0.030939655378460884,
0.1121687963604927,
0.12571842968463898,
0.05258069559931755,
0.0481170229613781,
0.0002127334737451747,
-0.0561506561934948,
-0.008168719708919525,
-0.05726633965969086,
0.06774696707725525,
0.061340972781181335,
-0.12918008863925934,
-0.08061543852090836,
0.0011613310780376196,
0.06660808622837067,
-0.016230419278144836,
0.06823775917291641,
-0.13560809195041656,
-0.03582429885864258,
0.0790911465883255,
-0.07693151384592056,
-0.14156894385814667,
0.11972879618406296,
-0.026570770889520645,
-0.19904157519340515,
0.05265914276242256,
0.007704653777182102,
0.0908159390091896,
-0.06360849738121033,
0.05343840271234512,
-0.13023801147937775,
-0.12935101985931396,
-0.018437571823596954,
0.07945099472999573,
-0.3450873792171478,
0.13536721467971802,
-0.013286802917718887,
-0.02876877970993519,
-0.06474969536066055,
-0.02640824392437935,
0.013905409723520279,
0.12719078361988068,
0.08667250722646713,
0.0008821099763736129,
0.0991629809141159,
0.03823768347501755,
0.04188435152173042,
-0.002011700300499797,
0.10950417071580887,
0.0050011589191854,
0.004797275178134441,
-0.04982118681073189,
0.007274609990417957,
-0.05164213851094246,
-0.07472953200340271,
0.08393982797861099,
-0.20678792893886566,
0.09087453782558441,
-0.03378438204526901,
0.08427679538726807,
0.04304937273263931,
-0.018965769559144974,
-0.1001204177737236,
0.19745583832263947,
-0.012206900864839554,
-0.11405988782644272,
-0.07517550885677338,
-0.02810264565050602,
0.09103139489889145,
-0.013817726634442806,
0.012886416167020798,
-0.045470476150512695,
0.032183047384023666,
-0.1263762265443802,
-0.1597503274679184,
0.08734500408172607,
-0.04441224783658981,
-0.10894393920898438,
-0.025462759658694267,
0.20382575690746307,
-0.007266622502356768,
0.08242089301347733,
0.01605331338942051,
0.010653935372829437,
-0.18066231906414032,
-0.04018142446875572,
0.02645772136747837,
-0.0016437612939625978,
0.005979063920676708,
0.047698814421892166,
0.019091911613941193,
0.06207629665732384,
-0.1069745197892189,
-0.013920160941779613,
0.3158324360847473,
0.15978319942951202,
-0.00912671908736229,
0.14943915605545044,
0.1093616932630539,
-0.08669080585241318,
-0.17238758504390717,
-0.1171615794301033,
-0.1210922971367836,
-0.08425768464803696,
-0.10681738704442978,
-0.1525043100118637,
0.09535340964794159,
-0.03392014652490616,
0.03498011827468872,
0.14615866541862488,
-0.280263751745224,
-0.10949636250734329,
0.13820378482341766,
0.010744688101112843,
0.3510635495185852,
-0.12303631007671356,
-0.044944874942302704,
-0.06214528530836105,
-0.16933435201644897,
0.08021392673254013,
-0.031203703954815865,
0.11581093072891235,
-0.0744495838880539,
0.19395925104618073,
0.01719796098768711,
0.014287159778177738,
0.0916559100151062,
0.05038322135806084,
-0.05808406323194504,
-0.07368700206279755,
-0.10248131304979324,
0.010812131687998772,
0.03546109423041344,
0.010252019390463829,
-0.008802837692201138,
0.0211968794465065,
-0.11341743916273117,
-0.050869911909103394,
-0.06302189081907272,
0.0072614275850355625,
-0.01001308299601078,
-0.042155615985393524,
-0.05533592775464058,
-0.022557416930794716,
-0.020093943923711777,
0.02266426384449005,
0.14185629785060883,
-0.07527699321508408,
0.18586260080337524,
0.02357078716158867,
0.1586609035730362,
-0.11956068128347397,
-0.06724818795919418,
-0.029193658381700516,
-0.05280323326587677,
0.06468886137008667,
-0.08884575963020325,
-0.027708567678928375,
0.1332162618637085,
-0.01903904788196087,
0.04655366763472557,
0.12936700880527496,
0.02046884410083294,
0.015383756719529629,
0.034968774765729904,
-0.2578005790710449,
-0.07463036477565765,
-0.03505445644259453,
-0.012416874058544636,
0.05272092670202255,
0.05525677278637886,
0.19735674560070038,
-0.03551921248435974,
-0.08521962910890579,
0.020131373777985573,
0.02735883742570877,
-0.02776256389915943,
0.10749414563179016,
0.019579345360398293,
-0.004837906453758478,
-0.16151933372020721,
0.08257976174354553,
-0.005964108742773533,
-0.08297000825405121,
0.028665626421570778,
0.2024049311876297,
-0.12141239643096924,
-0.10309756547212601,
-0.06804922968149185,
0.07315051555633545,
-0.09220825880765915,
0.016043387353420258,
-0.005091092549264431,
-0.1521538347005844,
0.06916408240795135,
0.07598215341567993,
0.04075418785214424,
0.06513199955224991,
-0.11743064224720001,
-0.015730571001768112,
-0.04170290008187294,
-0.002195435343310237,
0.03521120920777321,
0.01863143965601921,
-0.057492829859256744,
0.15846455097198486,
-0.0676199421286583,
0.08538917452096939,
-0.0744810476899147,
-0.1058846190571785,
-0.1395980566740036,
0.04660497233271599,
-0.08038312196731567,
-0.07247276604175568,
-0.12832807004451752,
-0.052204377949237823,
-0.0067099276930093765,
-0.03388519585132599,
0.006552806124091148,
-0.06627799570560455,
-0.10922821611166,
0.01822470687329769,
-0.00743203004822135,
-0.009385870769619942,
-0.06096754968166351,
0.026706209406256676,
0.06246216222643852,
-0.039788868278265,
0.15730851888656616,
0.22509248554706573,
-0.13591648638248444,
0.11564400047063828,
-0.09797432273626328,
-0.105463907122612,
0.046008042991161346,
0.009427277371287346,
0.03594303876161575,
0.0503489226102829,
-0.03594081476330757,
0.0044484552927315235,
0.03905477747321129,
0.08074651658535004,
0.08456914126873016,
-0.06776505708694458,
0.020801106467843056,
-0.05122765153646469,
-0.14904099702835083,
-0.016655439510941505,
-0.0464773029088974,
0.06876829266548157,
-0.006725262850522995,
0.11020535975694656,
-0.0515950471162796,
0.07739507406949997,
-0.07558431476354599,
0.050614211708307266,
0.021146971732378006,
-0.14688286185264587,
-0.006612539757043123,
-0.07093682140111923,
0.042144812643527985,
-0.008834975771605968,
0.20241086184978485,
-0.03228091076016426,
0.010342049412429333,
0.033811055123806,
0.06203942745923996,
-0.01957780309021473,
0.009357001632452011,
0.2014283686876297,
0.12640917301177979,
-0.08496357500553131,
-0.02679651789367199,
0.06793134659528732,
0.07248228788375854,
0.07093550264835358,
0.10807815194129944,
-0.015352966263890266,
0.028434239327907562,
0.07829629629850388,
-0.060215238481760025,
0.07576877623796463,
-0.08603982627391815,
-0.11668483167886734,
0.05793621391057968,
0.012955795042216778,
-0.055695828050374985,
0.20305177569389343,
0.19142870604991913,
-0.026278704404830933,
0.018410727381706238,
-0.0029499190859496593,
-0.10117456316947937,
-0.15619947016239166,
-0.05423750728368759,
-0.07170962542295456,
-0.1319410353899002,
-0.004549739416688681,
-0.16646917164325714,
0.022016216069459915,
-0.01132756657898426,
0.09506805986166,
-0.06855440139770508,
-0.01345991250127554,
0.1364889293909073,
-0.1055467277765274,
0.0847758799791336,
-0.024517204612493515,
0.07877567410469055,
-0.03746940940618515,
-0.018209461122751236,
-0.10342709720134735,
0.007514837197959423,
0.01131442841142416,
0.06840907037258148,
-0.10897937417030334,
0.02432350255548954,
-0.12208317965269089,
-0.08617185056209564,
-0.026142612099647522,
0.09279687702655792,
-0.0403008833527565,
0.15116846561431885,
0.02645145356655121,
-0.06710928678512573,
-0.004313822835683823,
0.2646709978580475,
-0.08046227693557739,
-0.08319197595119476,
-0.030799202620983124,
0.2152107208967209,
0.04053696244955063,
0.06396269053220749,
0.019140036776661873,
0.038027774542570114,
-0.07184682041406631,
0.2957373559474945,
0.34401440620422363,
-0.1318037211894989,
-0.007773484103381634,
0.04225075617432594,
0.04406323283910751,
0.14687567949295044,
0.07998795062303543,
0.11360671371221542,
0.2849363386631012,
-0.09197647124528885,
0.016657205298542976,
-0.04230864346027374,
-0.01424806285649538,
-0.06908884644508362,
0.045314885675907135,
0.08216670155525208,
-0.09241747111082077,
-0.022950593382120132,
0.08125471323728561,
-0.29741767048835754,
0.10791494697332382,
-0.15600289404392242,
-0.14948409795761108,
-0.05027429759502411,
-0.008771711029112339,
0.014683255925774574,
0.019041186198592186,
0.09663030505180359,
0.025651484727859497,
-0.07275258749723434,
0.07816889137029648,
0.024486342445015907,
-0.23020237684249878,
-0.01345184724777937,
0.1456068754196167,
-0.06789913028478622,
-0.025938833132386208,
-0.021313713863492012,
0.051610056310892105,
0.05763651058077812,
0.09027529507875443,
-0.03809558227658272,
-0.0746568813920021,
-0.007141788024455309,
-0.022818787023425102,
0.01914946548640728,
0.0597183033823967,
0.06841408461332321,
-0.0920223817229271,
0.1167774423956871,
-0.07350476831197739,
0.0650370642542839,
0.037623800337314606,
-0.022277191281318665,
0.0018526542698964477,
0.013183658011257648,
-0.06512464582920074,
0.05533479526638985,
0.1295643299818039,
-0.025459708645939827,
-0.002524374984204769,
-0.028180841356515884,
-0.0767761766910553,
-0.024015206843614578,
-0.04643676429986954,
-0.09101243317127228,
-0.18130090832710266,
-0.12738600373268127,
0.041754670441150665,
-0.03240608796477318,
-0.2046082615852356,
0.0060346988029778,
-0.1128578633069992,
0.03700976446270943,
-0.14154092967510223,
0.10004086047410965,
0.07216610759496689,
0.004716616589576006,
0.006774604320526123,
0.0675399899482727,
0.045677728950977325,
0.14796748757362366,
-0.16543124616146088,
-0.04919974133372307
] |
null | null | null |
pip install vaderSentiment
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
analyser = SentimentIntensityAnalyzer()
analyser.polarity_scores("I hate watching movies")
import nltk
from nltk.tokenize import word_tokenize, RegexpTokenizer
from nltk.sentiment.vader import SentimentIntensityAnalyzer
nltk.download('all')
import numpy as np
sentence = """I love dancing & painting"""
tokenized_sentence = nltk.word_tokenize(sentence)
from nltk import word_tokenize
from typing import List
Analyzer = SentimentIntensityAnalyzer()
pos_word_list=[]
neu_word_list=[]
neg_word_list=[]
pos_score_list=[]
neg_score_list=[]
score_list=[]
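# Classify each token with VADER: compound score >= 0.1 -> positive, <= -0.1 -> negative, otherwise neutral.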
for word in tokenized_sentence:
if (Analyzer.polarity_scores(word)['compound']) >= 0.1:
pos_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
elif (Analyzer.polarity_scores(word)['compound']) <= -0.1:
neg_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
else:
neu_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
print('Positive:',pos_word_list)
print('Neutral:',neu_word_list)
print('Negative:',neg_word_list)
print('Score:', score_list)
score = Analyzer.polarity_scores(sentence)
print('\nScores:', score)
predict_log=score.values()
value_iterator=iter(predict_log)
neg_prediction=next(value_iterator)
neu_prediction=next(value_iterator)
pos_prediction=next(value_iterator)
prediction_list=[neg_prediction, pos_prediction]
prediction_list_array=np.array(prediction_list)
import scipy.stats

# Note: this is a method fragment from the original card; it assumes an object
# exposing score() (a VADER-style score in [-1, 1]) and a `classes` array of rating bins.
def predict(self, texts):
    probs = []
    for text in texts:
        offset = (self.score(text) + 1) / 2.
        binned = np.digitize(5 * offset, self.classes) + 1
        simulated_probs = scipy.stats.norm.pdf(self.classes, binned, scale=0.5)
        probs.append(simulated_probs)
    return np.array(probs)
latex_special_token = ["!@#$%^&*()"]
import operator
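# generate() renders each word into a standalone LaTeX document, colouring it by its
# sentiment score (color_pos for positive words, color_neg for negative words).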
def generate(text_list, attention_list, latex_file, color_neg='red', color_pos='green', rescale_value = False):
print("hello")
attention_list = rescale(attention_list)
word_num = len(text_list)
print(len(attention_list))
print(len(text_list))
text_list = clean_word(text_list)
with open(latex_file,'w') as f:
f.write(r'''\documentclass[varwidth]{standalone}
\special{papersize=210mm,297mm}
\usepackage{color}
\usepackage{tcolorbox}
\usepackage{CJK}
\usepackage{adjustbox}
\tcbset{width=0.9\textwidth,boxrule=0pt,colback=red,arc=0pt,auto outer arc,left=0pt,right=0pt,boxsep=5pt}
\begin{document}
\begin{CJK*}{UTF8}{gbsn}'''+'\n')
string = r'''{\setlength{\fboxsep}{0pt}\colorbox{white!0}{\parbox{0.9\textwidth}{'''+"\n"
for idx in range(len(attention_list)):
if attention_list[idx] > 0:
string += "\\colorbox{%s!%s}{"%(color_pos, attention_list[idx])+"\\strut " + text_list[idx]+"} "
else:
string += "\\colorbox{%s!%s}{"%(color_neg, -attention_list[idx])+"\\strut " + text_list[idx]+"} "
string += "\n}}}"
f.write(string+'\n')
f.write(r'''\end{CJK*}
\end{document}''')
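# rescale() converts the raw scores to percentages of the largest absolute score,
# which are used as \colorbox intensities in the LaTeX output.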
def rescale(input_list):
the_array = np.asarray(input_list)
the_max = np.max(abs(the_array))
rescale = the_array/the_max
rescale = rescale*100
rescale = np.round(rescale, 3)
'''
the_array = np.asarray(input_list)
the_max = np.max(the_array)
the_min = np.min(the_array)
rescale = ((the_array - the_min)/(the_max-the_min))*100
for i in rescale:
print(rescale)
'''
return rescale.tolist()
def clean_word(word_list):
new_word_list = []
for word in word_list:
for latex_sensitive in ["\\", "%", "&", "^", "#", "_", "{", "}"]:
if latex_sensitive in word:
word = word.replace(latex_sensitive, '\\'+latex_sensitive)
new_word_list.append(word)
return new_word_list
if __name__ == '__main__':
color_1 = 'red'
color_2 = 'green'
words = word_tokenize(sentence)
word_num = len(words)
generate(words, score_list, "sple.tex", color_1, color_2)
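    # The resulting sple.tex file can then be compiled with a LaTeX engine (e.g. pdflatex)
    # to view the word-level sentiment heatmap.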
|
{}
| null |
aishoo1612/VADER-With-heatmaps
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
pip install vaderSentiment
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
analyser = SentimentIntensityAnalyzer()
analyser.polarity_scores("I hate watching movies")
import nltk
from nltk.tokenize import word_tokenize, RegexpTokenizer
from URL import SentimentIntensityAnalyzer
nltk.download('all')
import numpy as np
sentence = """I love dancing & painting"""
tokenized_sentence = nltk.word_tokenize(sentence)
from nltk import word_tokenize
from typing import List
Analyzer = SentimentIntensityAnalyzer()
pos_word_list=[]
neu_word_list=[]
neg_word_list=[]
pos_score_list=[]
neg_score_list=[]
score_list=[]
for word in tokenized_sentence:
if (Analyzer.polarity_scores(word)['compound']) >= 0.1:
pos_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
elif (Analyzer.polarity_scores(word)['compound']) <= -0.1:
neg_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
else:
neu_word_list.append(word)
score_list.append(Analyzer.polarity_scores(word)['compound'])
print('Positive:',pos_word_list)
print('Neutral:',neu_word_list)
print('Negative:',neg_word_list)
print('Score:', score_list)
score = Analyzer.polarity_scores(sentence)
print('\nScores:', score)
predict_log=URL()
value_iterator=iter(predict_log)
neg_prediction=next(value_iterator)
neu_prediction=next(value_iterator)
pos_prediction=next(value_iterator)
prediction_list=[neg_prediction, pos_prediction]
prediction_list_array=URL(prediction_list)
def predict():
probs = []
for text in texts:
offset = (URL(text) + 1) / 2.
binned = np.digitize(5 * offset, self.classes) + 1
simulated_probs = URL(self.classes, binned, scale=0.5)
URL(simulated_probs)
return URL(probs)
latex_special_token = ["!@#$%^&*()"]
import operator
def generate(text_list, attention_list, latex_file, color_neg='red', color_pos='green', rescale_value = False):
print("hello")
attention_list = rescale(attention_list)
word_num = len(text_list)
print(len(attention_list))
print(len(text_list))
text_list = clean_word(text_list)
with open(latex_file,'w') as f:
f.write(r'''\documentclass[varwidth]{standalone}
\special{papersize=210mm,297mm}
\usepackage{color}
\usepackage{tcolorbox}
\usepackage{CJK}
\usepackage{adjustbox}
\tcbset{width=0.9\textwidth,boxrule=0pt,colback=red,arc=0pt,auto outer arc,left=0pt,right=0pt,boxsep=5pt}
\begin{document}
\begin{CJK*}{UTF8}{gbsn}'''+'\n')
string = r'''{\setlength{\fboxsep}{0pt}\colorbox{white!0}{\parbox{0.9\textwidth}{'''+"\n"
for idx in range(len(attention_list)):
if attention_list[idx] > 0:
string += "\\colorbox{%s!%s}{"%(color_pos, attention_list[idx])+"\\strut " + text_list[idx]+"} "
else:
string += "\\colorbox{%s!%s}{"%(color_neg, -attention_list[idx])+"\\strut " + text_list[idx]+"} "
string += "\n}}}"
f.write(string+'\n')
f.write(r'''\end{CJK*}
\end{document}''')
def rescale(input_list):
the_array = np.asarray(input_list)
the_max = URL(abs(the_array))
rescale = the_array/the_max
rescale = rescale*100
rescale = URL(rescale, 3)
'''
the_array = np.asarray(input_list)
the_max = URL(the_array)
the_min = URL(the_array)
rescale = ((the_array - the_min)/(the_max-the_min))*100
for i in rescale:
print(rescale)
'''
return URL()
def clean_word(word_list):
new_word_list = []
for word in word_list:
for latex_sensitive in ["\\", "%", "&", "^", "#", "_", "{", "}"]:
if latex_sensitive in word:
word = word.replace(latex_sensitive, '\\'+latex_sensitive)
new_word_list.append(word)
return new_word_list
if __name__ == '__main__':
color_1 = 'red'
color_2 = 'green'
words = word_tokenize(sentence)
word_num = len(words)
generate(words, score_list, "URL", color_1, color_2)
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-large-xls-r-300m-hi-colab_new
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the common_voice dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
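For readers who want to reproduce this setup, the list above corresponds roughly to the `TrainingArguments` sketch below. This is an illustrative reconstruction rather than the original training script, and the output directory name is only an assumption.

```python
from transformers import TrainingArguments

# Illustrative sketch of the hyperparameters listed above (not the original script).
# output_dir is an assumed placeholder.
training_args = TrainingArguments(
    output_dir="wav2vec2-large-xls-r-300m-hi-colab_new",
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    seed=42,
    gradient_accumulation_steps=2,   # effective train batch size: 16 * 2 = 32
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=30,
    fp16=True,                       # "Native AMP" mixed-precision training
)
```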
### Training results
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "datasets": ["common_voice"], "model-index": [{"name": "wav2vec2-large-xls-r-300m-hi-colab_new", "results": []}]}
|
automatic-speech-recognition
|
ajaiswal1008/wav2vec2-large-xls-r-300m-hi-colab_new
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"dataset:common_voice",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us
|
# wav2vec2-large-xls-r-300m-hi-colab_new
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
### Training results
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.10.3
|
[
"# wav2vec2-large-xls-r-300m-hi-colab_new\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n",
"# wav2vec2-large-xls-r-300m-hi-colab_new\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Training results",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.10.3"
] |
[
65,
54,
6,
12,
8,
3,
140,
4,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #dataset-common_voice #license-apache-2.0 #endpoints_compatible #region-us \n# wav2vec2-large-xls-r-300m-hi-colab_new\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the common_voice dataset.## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0003\n- train_batch_size: 16\n- eval_batch_size: 8\n- seed: 42\n- gradient_accumulation_steps: 2\n- total_train_batch_size: 32\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 500\n- num_epochs: 30\n- mixed_precision_training: Native AMP### Training results### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.10.3"
] |
[
-0.08972933143377304,
0.15540553629398346,
-0.0020041661337018013,
0.02871023863554001,
0.12383127957582474,
0.016970481723546982,
0.08076003938913345,
0.12764744460582733,
-0.07208251953125,
0.1114870011806488,
0.059391286224126816,
-0.005239564925432205,
0.08960801362991333,
0.09211379289627075,
0.010438635013997555,
-0.25595811009407043,
-0.006628275848925114,
-0.0025065827649086714,
-0.061067160218954086,
0.08590751886367798,
0.11225888878107071,
-0.07901519536972046,
0.029447302222251892,
0.023880798369646072,
-0.12109200656414032,
0.03129567950963974,
-0.07061224430799484,
-0.0643451064825058,
0.07947750389575958,
0.030815627425909042,
0.042915020138025284,
0.0019146846607327461,
0.1096896231174469,
-0.28249478340148926,
0.0021171558182686567,
0.06938518583774567,
0.03797822818160057,
0.07414915412664413,
0.076631560921669,
0.011377615854144096,
0.11853741854429245,
-0.1679004728794098,
0.10031982511281967,
0.04828789830207825,
-0.04675509035587311,
-0.170758917927742,
-0.08541545271873474,
0.08169283717870712,
0.1164112538099289,
0.12048914283514023,
-0.017624713480472565,
0.10914628207683563,
-0.07920186966657639,
0.05289807915687561,
0.16908758878707886,
-0.2434583455324173,
-0.0562775619328022,
-0.0008535723318345845,
0.06237858906388283,
0.05880173295736313,
-0.11892587691545486,
-0.005183153785765171,
0.04058253765106201,
0.02331167459487915,
0.060969144105911255,
0.02032579481601715,
-0.008243226446211338,
-0.009778101928532124,
-0.12248102575540543,
-0.02969863824546337,
0.16760243475437164,
0.11382412910461426,
-0.03424592688679695,
-0.16869668662548065,
0.01501717884093523,
-0.12245050817728043,
-0.030632363632321358,
-0.028412483632564545,
0.013972293585538864,
-0.04116196185350418,
-0.09514368325471878,
-0.03153739869594574,
-0.06864682585000992,
-0.052700284868478775,
0.05568845197558403,
0.10128626972436905,
0.02869177795946598,
-0.028540628030896187,
0.0074330526404082775,
0.06380172818899155,
0.03323343023657799,
-0.11088883876800537,
-0.013868380337953568,
0.02415785938501358,
-0.12631750106811523,
-0.0488651804625988,
-0.03505254164338112,
-0.05632337927818298,
0.0006890725344419479,
0.10467828065156937,
0.010671787895262241,
0.07796190679073334,
0.013702090829610825,
-0.0018459740094840527,
0.010205930098891258,
0.13149426877498627,
-0.05331377312541008,
-0.08164715766906738,
-0.03818598762154579,
0.09536372870206833,
-0.004508626647293568,
-0.013814353384077549,
-0.07106474041938782,
-0.012560046277940273,
0.08616424351930618,
0.07002074271440506,
-0.021400585770606995,
-0.01741213910281658,
-0.05377553030848503,
-0.034860190004110336,
0.024044353514909744,
-0.12117316573858261,
0.06232817843556404,
-0.006209906190633774,
-0.04290291666984558,
0.01302681490778923,
-0.013039859011769295,
0.02114230953156948,
-0.04861028119921684,
0.067677341401577,
-0.061756569892168045,
-0.030546516180038452,
-0.04966205731034279,
-0.04095742851495743,
0.041542548686265945,
-0.018672149628400803,
0.00693494314327836,
-0.06127387285232544,
-0.10545670986175537,
-0.06153937429189682,
0.02647014893591404,
-0.06910358369350433,
-0.08660800755023956,
-0.03604981303215027,
-0.007994753308594227,
0.03305063769221306,
-0.016635440289974213,
0.14942769706249237,
-0.03270697221159935,
0.06219310685992241,
-0.018163762986660004,
0.006595690734684467,
0.06948258727788925,
0.06377148628234863,
-0.04820144549012184,
0.03792828321456909,
-0.03873731568455696,
0.09843450784683228,
-0.10211654007434845,
0.027862388640642166,
-0.14839336276054382,
-0.10738754272460938,
-0.05464700981974602,
-0.022875726222991943,
0.06824010610580444,
0.09012472629547119,
-0.15432654321193695,
-0.060050126165151596,
0.14409354329109192,
-0.044183582067489624,
-0.09474856406450272,
0.12577231228351593,
-0.017002763226628304,
-0.01891101524233818,
0.056536730378866196,
0.12833581864833832,
0.12255740910768509,
-0.119610495865345,
-0.050029244273900986,
-0.011455364525318146,
0.09853267669677734,
0.016693877056241035,
0.09365241229534149,
-0.03750471770763397,
0.03272726386785507,
-0.004187295213341713,
-0.00212701503187418,
0.029651805758476257,
-0.06143350899219513,
-0.08042314648628235,
-0.027300627902150154,
-0.0956498235464096,
-0.0019191262545064092,
0.029719961807131767,
0.00972509104758501,
-0.08008602261543274,
-0.13854999840259552,
0.0716865062713623,
0.1385989785194397,
-0.06783612072467804,
0.0070187184028327465,
-0.07046937197446823,
-0.010026443749666214,
-0.040210649371147156,
-0.01769905537366867,
-0.16773512959480286,
-0.06427556276321411,
0.049858562648296356,
-0.09686024487018585,
0.04584328085184097,
0.001458524027839303,
0.05773884430527687,
0.03837532922625542,
-0.0482146292924881,
-0.02702454850077629,
-0.10513617098331451,
0.008907991461455822,
-0.07221215218305588,
-0.14300614595413208,
-0.07609673589468002,
-0.031213926151394844,
0.21632714569568634,
-0.21341697871685028,
-0.011037946678698063,
0.01622319407761097,
0.14071200788021088,
0.011376291513442993,
-0.07646077126264572,
0.012991653755307198,
0.04914601892232895,
0.01132851280272007,
-0.0872410237789154,
0.008660192601382732,
0.009180409833788872,
-0.13372109830379486,
-0.050095632672309875,
-0.11602764576673508,
0.02750326879322529,
0.06738012284040451,
0.11189166456460953,
-0.07530888170003891,
-0.08302810043096542,
-0.062105920165777206,
-0.045293428003787994,
-0.07263711839914322,
-0.02435799315571785,
0.21576382219791412,
0.040275316685438156,
0.08626478165388107,
-0.04936197027564049,
-0.06748958677053452,
0.015906939283013344,
0.04330902174115181,
-0.060198795050382614,
0.08392965793609619,
0.058747027069330215,
-0.13441202044487,
0.06316868960857391,
0.055791739374399185,
-0.017884396016597748,
0.1293114870786667,
-0.05032242834568024,
-0.12281014770269394,
-0.023507313802838326,
0.003347153076902032,
0.008830489590764046,
0.09183468669652939,
-0.11796018481254578,
0.006830242928117514,
0.03958769887685776,
0.000377275311620906,
0.03726787865161896,
-0.1276184618473053,
0.000015906585758784786,
0.048318903893232346,
-0.022612737491726875,
-0.016949594020843506,
-0.03828644007444382,
0.008778971619904041,
0.05764167383313179,
0.04565392807126045,
0.017746517434716225,
-0.002357994671911001,
-0.024787282571196556,
-0.09461381286382675,
0.1436072438955307,
-0.09708506613969803,
-0.19863352179527283,
-0.13338638842105865,
0.04076908156275749,
-0.027302969247102737,
-0.04567020386457443,
0.012253801338374615,
-0.1149456724524498,
-0.06211193650960922,
-0.0788717120885849,
-0.014939816668629646,
-0.07973256707191467,
-0.0029577314853668213,
0.08809752762317657,
0.03388649597764015,
0.08236567676067352,
-0.11779289692640305,
0.031066391617059708,
0.003194889286532998,
-0.03845767304301262,
-0.042583443224430084,
0.037430331110954285,
0.10528141260147095,
0.1262335181236267,
0.01668059639632702,
0.025910789147019386,
-0.03334544599056244,
0.18756605684757233,
-0.11329282075166702,
-0.00937303900718689,
0.11222347617149353,
0.030796412378549576,
0.03697061538696289,
0.08162838965654373,
0.03855948895215988,
-0.07470627874135971,
0.036359984427690506,
0.06351833790540695,
-0.0163597222417593,
-0.24333830177783966,
-0.06412968784570694,
-0.05130913853645325,
-0.1080414429306984,
0.14896968007087708,
0.06143158674240112,
-0.010485070757567883,
0.04005959630012512,
-0.033677536994218826,
0.03506547957658768,
-0.0007475396269001067,
0.07645691186189651,
0.052696797996759415,
0.04413183033466339,
0.08088140934705734,
-0.02660202980041504,
-0.03456031158566475,
0.05794842168688774,
0.014946515671908855,
0.21646049618721008,
0.013865581713616848,
0.15258589386940002,
0.007086028344929218,
0.11988949775695801,
-0.011433747597038746,
0.033031217753887177,
0.021923143416643143,
-0.011609245091676712,
0.03089303895831108,
-0.06621529906988144,
-0.028533192351460457,
0.04875585809350014,
0.10938946902751923,
0.014974451623857021,
-0.08522143959999084,
0.0013692707289010286,
-0.0033046542666852474,
0.2873713970184326,
0.0657106339931488,
-0.2635514736175537,
-0.08123519271612167,
0.017535991966724396,
-0.05209983512759209,
-0.07391181588172913,
0.02078385464847088,
0.08898614346981049,
-0.12783123552799225,
0.087796151638031,
-0.04138627648353577,
0.10034016519784927,
-0.06695347279310226,
-0.013454623520374298,
0.046606387943029404,
0.08857076615095139,
0.0009855774696916342,
0.10323068499565125,
-0.1531522125005722,
0.18131890892982483,
0.012217630632221699,
0.1007293090224266,
-0.08346886932849884,
0.04711851850152016,
-0.012312449514865875,
-0.01360497996211052,
0.10660464316606522,
-0.0041071451269090176,
-0.05304424837231636,
-0.15260328352451324,
-0.10367216914892197,
0.03473905101418495,
0.11579066514968872,
-0.057359348982572556,
0.07384861260652542,
-0.0369703583419323,
-0.010546091943979263,
0.04369369149208069,
-0.06980732083320618,
-0.18391498923301697,
-0.2092514932155609,
0.030612971633672714,
0.0419277660548687,
0.04540480300784111,
-0.09224629402160645,
-0.103690005838871,
-0.04144406318664551,
0.21968574821949005,
0.022996259853243828,
-0.01953003741800785,
-0.15051083266735077,
0.10632426291704178,
0.16072691977024078,
-0.04891342669725418,
0.019481193274259567,
0.034862030297517776,
0.19492501020431519,
0.002602160209789872,
-0.04442533105611801,
0.051351431757211685,
-0.06315214186906815,
-0.12624050676822662,
-0.043273381888866425,
0.1888693869113922,
0.04848778247833252,
0.06894587725400925,
0.011328421533107758,
0.015296404249966145,
0.012230448424816132,
-0.08214223384857178,
0.06233448162674904,
0.05492640659213066,
0.0275215245783329,
0.07131096720695496,
-0.04513026028871536,
0.025950776413083076,
-0.056724581867456436,
-0.04169728234410286,
0.1739681363105774,
0.21277204155921936,
-0.07152712345123291,
0.07749038934707642,
0.07696095108985901,
-0.049561191350221634,
-0.1291625052690506,
0.027145501226186752,
0.12464550882577896,
0.044468287378549576,
0.05495471879839897,
-0.20758649706840515,
0.07972736656665802,
0.11604343354701996,
-0.013557097874581814,
0.002674839226529002,
-0.28977125883102417,
-0.11018339544534683,
0.09127204120159149,
0.05898375064134598,
-0.09181011468172073,
-0.11670981347560883,
-0.058553729206323624,
-0.07583845406770706,
-0.11302237957715988,
0.0830126628279686,
-0.024459023028612137,
0.10244852304458618,
0.008983821608126163,
0.06755314767360687,
0.03904508054256439,
-0.037520263344049454,
0.14557893574237823,
0.010058127343654633,
0.027240002527832985,
-0.022695517167448997,
0.06948138028383255,
0.04244151711463928,
-0.05728558450937271,
0.060270994901657104,
-0.07134417444467545,
0.04593833163380623,
-0.152708500623703,
-0.04777437821030617,
-0.050032708793878555,
0.042856764048337936,
-0.041947703808546066,
-0.04861101880669594,
-0.042701512575149536,
0.05713885650038719,
0.07674793154001236,
-0.022375626489520073,
0.06326217949390411,
0.0034476860892027617,
0.10479024052619934,
0.06646232306957245,
0.11447738856077194,
-0.0043586441315710545,
-0.13894586265087128,
-0.031650789082050323,
-0.028121598064899445,
0.047940194606781006,
-0.07044482231140137,
0.022321447730064392,
0.11700360476970673,
0.060929469764232635,
0.15432116389274597,
0.009977796114981174,
-0.09207122027873993,
0.001524407765828073,
0.03876039758324623,
-0.02361947111785412,
-0.19307056069374084,
-0.0434243381023407,
0.06550479680299759,
-0.17714829742908478,
-0.03469451144337654,
0.08250558376312256,
-0.04824918881058693,
-0.02305581420660019,
-0.006856806110590696,
0.031668394804000854,
-0.030004967004060745,
0.16314782202243805,
0.019036533311009407,
0.08333788812160492,
-0.0656125396490097,
0.09111320972442627,
0.10753432661294937,
-0.13279202580451965,
0.0716915875673294,
0.054679274559020996,
-0.04209860414266586,
-0.014750783331692219,
0.041450388729572296,
0.06768423318862915,
0.020626796409487724,
-0.04273873567581177,
-0.0400334857404232,
-0.12292221933603287,
0.062263358384370804,
-0.03115326724946499,
-0.003546688240021467,
-0.026149891316890717,
-0.03158959746360779,
0.021906105801463127,
-0.15117183327674866,
0.07899877429008484,
0.03454052284359932,
0.04819837212562561,
-0.13464805483818054,
0.05239075422286987,
0.018677910789847374,
0.019649766385555267,
0.004514739383012056,
-0.005438704509288073,
-0.05995241552591324,
-0.009032268077135086,
-0.11816045641899109,
-0.0402783639729023,
-0.05150803551077843,
0.013612738810479641,
-0.019986921921372414,
-0.03308863937854767,
-0.04751111939549446,
0.037090614438056946,
-0.05977131798863411,
-0.09095963090658188,
-0.004383197519928217,
0.08249401301145554,
-0.1104787215590477,
0.016030550003051758,
0.04275286942720413,
-0.10749003291130066,
0.07061672955751419,
0.040551282465457916,
0.04594266414642334,
0.011379237286746502,
-0.05940597131848335,
-0.016257936134934425,
0.04040529951453209,
0.03585914149880409,
0.04868801683187485,
-0.14307714998722076,
-0.010260251350700855,
-0.00793667696416378,
0.0069804019294679165,
0.009237711317837238,
-0.0002834252954926342,
-0.11188303679227829,
-0.0646478459239006,
-0.07788387686014175,
-0.04471895098686218,
-0.0581933856010437,
0.05128065496683121,
0.10356136411428452,
0.03366657346487045,
0.13390783965587616,
-0.06582118570804596,
0.06492085009813309,
-0.21134516596794128,
-0.018756020814180374,
-0.02775619737803936,
0.017592137679457664,
-0.017324520274996758,
-0.01920289359986782,
0.07765254378318787,
-0.03174615278840065,
0.1227419376373291,
-0.050580523908138275,
0.07537335902452469,
0.04960642755031586,
-0.05384133756160736,
-0.006449560169130564,
0.004033592529594898,
0.20787829160690308,
0.09388202428817749,
-0.018275436013936996,
0.10076556354761124,
-0.05465971305966377,
0.05412329360842705,
0.1085607260465622,
0.09772788733243942,
0.15763893723487854,
0.02571251057088375,
0.047182243317365646,
0.08450967818498611,
-0.13138869404792786,
-0.12065163254737854,
0.1380019634962082,
-0.0171713475137949,
0.11352656036615372,
-0.026026640087366104,
0.1718667894601822,
0.10151655972003937,
-0.17580516636371613,
0.04915575310587883,
-0.049065981060266495,
-0.10372146964073181,
-0.07094106823205948,
-0.08118171244859695,
-0.07939334958791733,
-0.11693865060806274,
0.02387252077460289,
-0.10341359674930573,
0.018782511353492737,
0.03114754892885685,
0.019422277808189392,
0.023090697824954987,
0.12782807648181915,
-0.0243869349360466,
-0.005284924991428852,
0.09700524806976318,
-0.0008390706498175859,
-0.010135778225958347,
-0.06622077524662018,
-0.04256562516093254,
0.07756669819355011,
0.010866638273000717,
0.10752041637897491,
-0.041885312646627426,
-0.03854326531291008,
0.06065083667635918,
0.020442895591259003,
-0.08505459129810333,
0.02099696919322014,
-0.01666230522096157,
0.04641695320606232,
0.07345559448003769,
0.053978610783815384,
-0.003274392569437623,
-0.05997758358716965,
0.2355920523405075,
-0.06048258766531944,
-0.04267150163650513,
-0.13305142521858215,
0.11260377615690231,
0.026751389726996422,
-0.008371994830667973,
0.053470876067876816,
-0.11081500351428986,
-0.011647539213299751,
0.11244001984596252,
0.12766540050506592,
-0.014158527366816998,
-0.01452858466655016,
-0.001814369112253189,
-0.01769568957388401,
-0.06227324903011322,
0.07880914956331253,
0.10469012707471848,
0.023739540949463844,
-0.03831735998392105,
0.03373154625296593,
-0.014027927070856094,
-0.06767056882381439,
-0.054527174681425095,
0.08990858495235443,
0.02158827893435955,
0.011820416897535324,
-0.014969092793762684,
0.12902294099330902,
0.005337963346391916,
-0.17629089951515198,
0.0003518119629006833,
-0.14892013370990753,
-0.20696817338466644,
-0.020578494295477867,
0.05271759256720543,
0.004548089113086462,
0.05027499794960022,
0.004397228360176086,
-0.007257642690092325,
0.1198783814907074,
0.0038420537021011114,
-0.04893198981881142,
-0.10257790237665176,
0.09433267265558243,
-0.0216695424169302,
0.20062336325645447,
0.00247917789965868,
0.04532602056860924,
0.10336239635944366,
0.037528738379478455,
-0.139861062169075,
0.0250613484531641,
0.08035222440958023,
-0.06761365383863449,
0.07144524902105331,
0.19797977805137634,
-0.05364818871021271,
0.1251562386751175,
0.05292737856507301,
-0.1255519539117813,
-0.019748661667108536,
-0.09552682936191559,
0.021066371351480484,
-0.08671560883522034,
0.03022974170744419,
-0.045976731926202774,
0.1691949963569641,
0.17624858021736145,
-0.06820549815893173,
-0.03786478564143181,
-0.051990244537591934,
0.02613740973174572,
0.04952039569616318,
0.14690354466438293,
-0.0193868987262249,
-0.20542412996292114,
0.00633176788687706,
-0.028559789061546326,
0.0355520024895668,
-0.25519585609436035,
-0.09559013694524765,
0.05638774111866951,
-0.06556712836027145,
-0.027187928557395935,
0.11492941528558731,
0.07001273334026337,
0.010626770555973053,
-0.04716232419013977,
-0.14989425241947174,
-0.023622605949640274,
0.1296287178993225,
-0.16734915971755981,
-0.020120978355407715
] |
null | null |
transformers
|
# greens
Autogenerated by HuggingPics🤗🖼️
Create your own image classifier for **anything** by running [the demo on Google Colab](https://colab.research.google.com/github/nateraw/huggingpics/blob/main/HuggingPics.ipynb).
Report any issues with the demo at the [github repo](https://github.com/nateraw/huggingpics).
## Example Images
#### cucumber

#### green beans

#### okra

#### pickle

#### zucinni

|
{"tags": ["image-classification", "pytorch", "huggingpics"], "metrics": ["accuracy"]}
|
image-classification
|
ajanco/greens
|
[
"transformers",
"pytorch",
"tensorboard",
"vit",
"image-classification",
"huggingpics",
"model-index",
"autotrain_compatible",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us
|
# greens
Autogenerated by HuggingPics️
Create your own image classifier for anything by running the demo on Google Colab.
Report any issues with the demo at the github repo.
## Example Images
#### cucumber
!cucumber
#### green beans
!green beans
#### okra
!okra
#### pickle
!pickle
#### zucinni
!zucinni
|
[
"# greens\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.",
"## Example Images",
"#### cucumber\n\n!cucumber",
"#### green beans\n\n!green beans",
"#### okra\n\n!okra",
"#### pickle\n\n!pickle",
"#### zucinni\n\n!zucinni"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us \n",
"# greens\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.",
"## Example Images",
"#### cucumber\n\n!cucumber",
"#### green beans\n\n!green beans",
"#### okra\n\n!okra",
"#### pickle\n\n!pickle",
"#### zucinni\n\n!zucinni"
] |
[
49,
41,
4,
9,
9,
6,
7,
9
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #vit #image-classification #huggingpics #model-index #autotrain_compatible #endpoints_compatible #region-us \n# greens\n\n\nAutogenerated by HuggingPics️\n\nCreate your own image classifier for anything by running the demo on Google Colab.\n\nReport any issues with the demo at the github repo.## Example Images#### cucumber\n\n!cucumber#### green beans\n\n!green beans#### okra\n\n!okra#### pickle\n\n!pickle#### zucinni\n\n!zucinni"
] |
[
-0.11635752767324448,
0.2100878357887268,
-0.0019205182325094938,
0.11264282464981079,
0.16000820696353912,
-0.011923654936254025,
0.04627173766493797,
0.11192784458398819,
0.15490266680717468,
0.11882397532463074,
0.1360846608877182,
0.26803481578826904,
0.003105561248958111,
0.24046814441680908,
-0.003452843753620982,
-0.19453319907188416,
0.009433458559215069,
0.08580228686332703,
-0.026749953627586365,
0.09770957380533218,
0.048486918210983276,
-0.10396701097488403,
0.17171652615070343,
0.022196536883711815,
-0.2786792814731598,
-0.05457829311490059,
-0.007275596726685762,
-0.06765247881412506,
0.09805488586425781,
0.033723484724760056,
0.04167575016617775,
-0.008563465438783169,
-0.013351226225495338,
-0.03950430452823639,
0.06126425042748451,
0.02076093852519989,
-0.06787922233343124,
0.09255640208721161,
0.07438013702630997,
0.006130722817033529,
0.0816551148891449,
-0.019057009369134903,
-0.04922259598970413,
0.041165806353092194,
-0.1397668868303299,
0.0015068071661517024,
-0.09157951921224594,
0.10328395664691925,
0.05484388768672943,
-0.004405218176543713,
-0.00514615885913372,
0.1052020713686943,
-0.08547013252973557,
0.07930179685354233,
0.2316848635673523,
-0.027728598564863205,
-0.11254797130823135,
-0.0020929703023284674,
0.09647685289382935,
-0.0063411640003323555,
-0.11406410485506058,
0.07426588237285614,
0.06539218872785568,
-0.027159979566931725,
-0.0809357687830925,
-0.09805025160312653,
-0.044050589203834534,
-0.03807659447193146,
-0.12473264336585999,
0.03825422003865242,
0.12635795772075653,
0.08548758924007416,
-0.04703078418970108,
-0.00019699634867720306,
-0.07104043662548065,
-0.018580622971057892,
-0.05859626829624176,
0.01721312664449215,
0.0576641783118248,
-0.027864785864949226,
-0.1191769614815712,
-0.05278189107775688,
-0.11367157846689224,
0.026537420228123665,
-0.05144008249044418,
0.19385652244091034,
0.058906007558107376,
-0.010285882279276848,
0.03381407633423805,
0.00831772480159998,
0.0171359870582819,
-0.06900979578495026,
-0.003007887862622738,
-0.05605049058794975,
0.040310315787792206,
-0.05030025541782379,
0.04512133076786995,
0.1097167432308197,
0.1518222987651825,
0.21219557523727417,
0.029933154582977295,
0.06767010688781738,
0.04224833473563194,
0.03740030899643898,
0.07529989629983902,
0.15044496953487396,
-0.09029722213745117,
-0.054222382605075836,
0.06224728375673294,
-0.013535624369978905,
0.004703660495579243,
-0.011648041196167469,
-0.09507670253515244,
-0.0021599819883704185,
0.04179687798023224,
-0.026623666286468506,
0.11624060571193695,
0.025553910061717033,
-0.13456140458583832,
-0.06959393620491028,
0.13123247027397156,
0.00041544169653207064,
0.000569141295272857,
-0.05514330044388771,
-0.03301272541284561,
0.008784312754869461,
0.09925941377878189,
0.04599250853061676,
-0.006751862820237875,
0.011231094598770142,
-0.14964492619037628,
-0.004404161591082811,
0.028353143483400345,
0.07917053997516632,
0.03934961184859276,
-0.2224729061126709,
0.02879898063838482,
-0.1699332445859909,
0.1113576665520668,
-0.05881914496421814,
0.07005247473716736,
0.0022007522638887167,
-0.1596427857875824,
0.07619782537221909,
0.03558386489748955,
-0.10359690338373184,
0.053118083626031876,
-0.04140721261501312,
-0.010113871656358242,
0.07685626298189163,
0.010965070687234402,
0.08547373861074448,
-0.03750055283308029,
0.008240255527198315,
-0.11989220231771469,
0.029208954423666,
-0.22106623649597168,
0.025066183879971504,
-0.049155671149492264,
0.15498214960098267,
-0.12258883565664291,
-0.005659888498485088,
0.04515305161476135,
0.014376508072018623,
-0.017269743606448174,
0.1559477597475052,
-0.020577071234583855,
-0.10018745064735413,
0.04632958024740219,
-0.04285237565636635,
-0.10828904062509537,
0.1356155276298523,
0.012100095860660076,
0.03075289912521839,
0.11944151669740677,
0.25196948647499084,
-0.011614546179771423,
-0.17285458743572235,
-0.00748411612585187,
-0.054705582559108734,
-0.06327932327985764,
0.04554388299584389,
-0.0038835934828966856,
0.03152933716773987,
-0.10383522510528564,
0.03638296574354172,
-0.07922779768705368,
0.07791205495595932,
-0.09118776023387909,
-0.05287559702992439,
-0.04817755147814751,
-0.11815696954727173,
0.11462966352701187,
0.08075486123561859,
-0.02185448631644249,
0.01747553050518036,
-0.030594119802117348,
-0.18721313774585724,
0.05945127457380295,
-0.0023350482806563377,
-0.007267156150192022,
-0.08976545929908752,
0.21084870398044586,
-0.047005120664834976,
-0.029169319197535515,
-0.07996288686990738,
-0.06726440042257309,
0.017908373847603798,
-0.0012545272475108504,
0.11795030534267426,
-0.02597075141966343,
-0.01924363151192665,
0.022127652540802956,
0.016785969957709312,
-0.014467040076851845,
0.09555909037590027,
-0.04308554157614708,
-0.12454557418823242,
-0.12368044257164001,
0.05199939012527466,
0.014584680087864399,
0.07771936804056168,
-0.13090984523296356,
-0.01751081459224224,
0.09650230407714844,
0.11917147040367126,
0.02184239588677883,
-0.06501228362321854,
0.050874341279268265,
-0.045198339968919754,
-0.029790010303258896,
-0.055474650114774704,
0.08382484316825867,
-0.009668305516242981,
-0.045309048146009445,
0.08802857249975204,
0.013068812899291515,
0.09320628643035889,
0.124160535633564,
-0.12222374975681305,
-0.08574438095092773,
-0.06843411922454834,
-0.06632079929113388,
0.010591955855488777,
0.016026683151721954,
0.07942578196525574,
-0.024025214836001396,
-0.04029904678463936,
0.08683444559574127,
-0.044730931520462036,
0.0067660328932106495,
0.0757073238492012,
-0.10324453562498093,
-0.03126545622944832,
0.10275959968566895,
0.08737454563379288,
-0.10990812629461288,
0.08703701198101044,
0.06776371598243713,
0.06388920545578003,
0.05172749236226082,
0.03656216338276863,
0.04804280027747154,
-0.061127204447984695,
0.04470321908593178,
0.028280772268772125,
0.11868829280138016,
-0.21144241094589233,
-0.01436079852283001,
0.013923466205596924,
-0.13719134032726288,
0.01833501271903515,
-0.1264335960149765,
-0.006130182184278965,
-0.03500112146139145,
0.04066404700279236,
0.23527252674102783,
0.07185547053813934,
-0.06967928260564804,
0.03811202570796013,
0.029258741065859795,
-0.0013432400301098824,
0.007838611491024494,
0.06109777092933655,
0.008196615613996983,
0.21392717957496643,
-0.029100336134433746,
-0.24688075482845306,
-0.02925458736717701,
-0.2760932445526123,
0.0032434542663395405,
0.12043791264295578,
0.10096215456724167,
-0.1366385817527771,
-0.03576236218214035,
0.051566231995821,
0.03202483803033829,
0.11869261413812637,
0.035549309104681015,
-0.11493248492479324,
-0.02173849381506443,
-0.00798766128718853,
-0.03852152079343796,
-0.03935107961297035,
-0.012733270414173603,
-0.02894943580031395,
0.14399906992912292,
-0.06631249934434891,
0.15393014252185822,
0.10081170499324799,
0.03976661339402199,
-0.024275610223412514,
0.042247526347637177,
0.2302694320678711,
-0.14043578505516052,
0.08749876916408539,
0.16449227929115295,
0.0004361486353445798,
0.1000056117773056,
0.06257330626249313,
-0.010705385357141495,
-0.10518252849578857,
0.034107133746147156,
0.04176989942789078,
-0.09001657366752625,
-0.17899812757968903,
-0.08948896825313568,
-0.05195372924208641,
0.14171911776065826,
0.13353370130062103,
0.07246245443820953,
0.1233222708106041,
0.22147126495838165,
-0.019727908074855804,
0.12528841197490692,
-0.031888823956251144,
0.048239730298519135,
0.04448878392577171,
-0.06858046352863312,
0.03855571150779724,
0.051540084183216095,
-0.0890776738524437,
0.12968306243419647,
0.04822568967938423,
0.1337425708770752,
0.043156009167432785,
0.1440775990486145,
0.09016989171504974,
0.15985505282878876,
0.04718843474984169,
-0.10307108610868454,
0.04074094444513321,
0.0006546159274876118,
-0.03302381932735443,
-0.04030882939696312,
-0.051897868514060974,
-0.01067079696804285,
0.08379492908716202,
-0.15089769661426544,
-0.006947601679712534,
0.010305400937795639,
0.009336722083389759,
0.1360633224248886,
-0.03981870040297508,
-0.23741410672664642,
0.01941850408911705,
-0.010861240327358246,
-0.025653749704360962,
-0.05136154964566231,
-0.02408340387046337,
-0.012954272329807281,
-0.1500130295753479,
0.009797651320695877,
-0.08790252357721329,
0.09386308491230011,
-0.11646833270788193,
-0.009058615192770958,
0.035092826932668686,
0.018083298578858376,
0.002819991437718272,
0.016721928492188454,
-0.032739799469709396,
0.04059874266386032,
-0.055910997092723846,
-0.051361676305532455,
-0.08542831242084503,
-0.02993086166679859,
0.06213369220495224,
0.10759551078081131,
0.10979153215885162,
0.022040050476789474,
0.10442750155925751,
-0.19249798357486725,
-0.09202912449836731,
-0.017228255048394203,
0.012693473137915134,
-0.08725447207689285,
-0.007303030230104923,
-0.004925182554870844,
-0.055570732802152634,
-0.055505137890577316,
0.0478643998503685,
-0.09467699378728867,
-0.06951319426298141,
0.056625012308359146,
-0.034943871200084686,
0.12411319464445114,
-0.03829503059387207,
-0.017739806324243546,
-0.08436903357505798,
0.16415642201900482,
0.03982793539762497,
-0.03178524598479271,
-0.12288448959589005,
0.03370215371251106,
0.0599064864218235,
-0.08876281976699829,
0.0927940309047699,
-0.11049623787403107,
0.11298768222332001,
-0.03132946044206619,
-0.02832293137907982,
0.11669784784317017,
-0.06375519186258316,
-0.15412704646587372,
-0.05807836726307869,
0.0733056515455246,
0.09218200296163559,
-0.04259900003671646,
0.04072006419301033,
0.049089398235082626,
-0.11023428291082382,
-0.05882713943719864,
-0.014958196319639683,
-0.05852299928665161,
0.08332758396863937,
0.10982443392276764,
-0.051418643444776535,
-0.021932251751422882,
-0.08046256005764008,
-0.03924662247300148,
0.08430946618318558,
0.13861393928527832,
0.0033604095224291086,
0.07717294245958328,
0.0530344657599926,
-0.04581715166568756,
-0.26180288195610046,
-0.05718105286359787,
0.06915423274040222,
-0.03827465698122978,
0.02800569124519825,
-0.11946510523557663,
0.24322500824928284,
0.14730684459209442,
-0.023072825744748116,
0.21916957199573517,
-0.11696107685565948,
-0.08265146613121033,
0.028547827154397964,
0.08521883189678192,
0.10956640541553497,
-0.2154792696237564,
-0.044999897480010986,
-0.018807921558618546,
-0.002854128135368228,
0.17699532210826874,
-0.0312625914812088,
0.10075751692056656,
-0.0407368503510952,
0.014127104543149471,
0.03205587714910507,
0.007632750552147627,
0.1429344117641449,
0.04701835289597511,
-0.051779866218566895,
-0.12883548438549042,
-0.2813549041748047,
-0.03269674628973007,
-0.0007593706832267344,
-0.07055104523897171,
0.03742237016558647,
0.07412216812372208,
-0.10415393859148026,
-0.005200694780796766,
-0.12608511745929718,
0.13156574964523315,
-0.03958619013428688,
0.004715274088084698,
-0.09545077383518219,
0.07768094539642334,
-0.050960276275873184,
0.123030886054039,
0.20494677126407623,
-0.016766533255577087,
0.06690365821123123,
0.07688862830400467,
-0.01925533637404442,
-0.019863756373524666,
-0.13169459998607635,
-0.10812627524137497,
-0.05085508152842522,
0.029327454045414925,
-0.13129054009914398,
0.01912774331867695,
0.05396217852830887,
0.025113698095083237,
0.0687544122338295,
-0.014397533610463142,
-0.007094565778970718,
0.0376594103872776,
0.10638878494501114,
-0.08941817283630371,
0.007167311850935221,
-0.007412073202431202,
-0.05193842574954033,
-0.061290398240089417,
-0.002308372175320983,
0.1074998527765274,
0.04802411422133446,
-0.08592627197504044,
0.02244805172085762,
0.02710239216685295,
0.0010256597306579351,
0.11994748562574387,
0.21831879019737244,
-0.01662738248705864,
-0.1198083758354187,
0.03263974189758301,
0.10658521950244904,
-0.10875476151704788,
-0.10861189663410187,
0.07611442357301712,
-0.09603466838598251,
-0.09794385731220245,
0.027248680591583252,
0.05617695301771164,
-0.17040620744228363,
0.048083219677209854,
-0.006233518943190575,
-0.020528221502900124,
0.01522987149655819,
-0.0017624718602746725,
0.07821383327245712,
0.030408794060349464,
0.0598599947988987,
-0.001857616240158677,
-0.06303990632295609,
0.021839868277311325,
0.12347951531410217,
0.1349826455116272,
-0.16343405842781067,
-0.08719039708375931,
-0.0330590084195137,
0.18804103136062622,
-0.06320729106664658,
-0.05145874246954918,
-0.16022856533527374,
-0.08636188507080078,
-0.04709254205226898,
0.15043878555297852,
-0.07350648194551468,
0.00902430061250925,
-0.0625896006822586,
-0.0713682770729065,
-0.009119689464569092,
-0.016142837703227997,
-0.0824837014079094,
-0.034115251153707504,
-0.011306987144052982,
0.06796835362911224,
-0.018209384754300117,
-0.04263472929596901,
0.1733701080083847,
-0.055021677166223526,
0.1374966949224472,
-0.005967825651168823,
-0.0008343599620275199,
-0.05451396107673645,
-0.15195561945438385,
0.015303644351661205,
0.11250912398099899,
-0.05115513503551483,
0.0018967270152643323,
-0.02453954704105854,
0.0413147509098053,
-0.05778247490525246,
0.0746782124042511,
-0.03801964595913887,
0.14184318482875824,
-0.19264167547225952,
-0.01325862668454647,
-0.04730696603655815,
-0.13616836071014404,
-0.06987319886684418,
0.023620011284947395,
0.01917218044400215,
-0.004844969138503075,
0.05824563279747963,
-0.020013291388750076,
0.05572417378425598,
-0.11680832505226135,
0.03548303619027138,
-0.03971230611205101,
-0.12363236397504807,
-0.012549210339784622,
-0.039663951843976974,
0.009364081546664238,
0.045932427048683167,
0.033217109739780426,
0.05153655633330345,
0.024840781465172768,
-0.0035953540354967117,
0.1191234141588211,
0.05791314318776131,
0.017640074715018272,
0.06313152611255646,
-0.025699537247419357,
0.03938605636358261,
-0.02541046403348446,
0.02733127772808075,
0.06823215633630753,
-0.19305017590522766,
-0.03025740757584572,
0.06038086861371994,
-0.046914197504520416,
0.0050858003087341785,
-0.03808483108878136,
0.0472017265856266,
-0.09840425848960876,
0.06572547554969788,
-0.12487383931875229,
0.03891739621758461,
-0.019250819459557533,
0.03326975554227829,
0.06028711795806885,
-0.10545898973941803,
-0.01018430758267641,
0.07601877301931381,
-0.06362016499042511,
-0.00930659007281065,
-0.11566326767206192,
-0.09308511018753052,
-0.10235901176929474,
0.06830219179391861,
-0.04080204293131828,
-0.022455230355262756,
0.006121320184320211,
-0.05683372542262077,
-0.08228106796741486,
0.17393484711647034,
0.0012459222925826907,
-0.11279277503490448,
0.0683177188038826,
0.01888522133231163,
-0.10033794492483139,
0.04987645149230957,
-0.10021156817674637,
-0.093793585896492,
0.11148279905319214,
0.03515336290001869,
0.005294006317853928,
-0.004161784425377846,
0.06387285143136978,
-0.0799395740032196,
-0.08186545222997665,
-0.02260262332856655,
-0.016011860221624374,
0.058030497282743454,
-0.06332028657197952,
-0.08313329517841339,
-0.010973901487886906,
0.0031556193716824055,
0.1853770613670349,
0.03384045511484146,
0.09553041309118271,
-0.05415710434317589,
-0.06149674206972122,
-0.08819004148244858,
-0.02255156636238098,
-0.07364969700574875,
-0.04738340154290199,
0.03891449049115181,
0.28970733284950256,
0.05587080493569374,
-0.047341927886009216,
-0.011972584761679173,
0.018670231103897095,
-0.0034410886000841856,
0.07188915461301804,
0.08299750089645386,
-0.015215513296425343,
0.1140531599521637,
-0.10672695189714432,
-0.011693225242197514,
0.03759346902370453,
0.007342055439949036,
-0.18897393345832825,
-0.06005635857582092,
0.04900045692920685,
-0.01071459986269474,
-0.11530261486768723,
0.0771019235253334,
-0.10362477600574493,
-0.12518192827701569,
0.1829146444797516,
-0.13164079189300537,
-0.15595029294490814,
-0.045596472918987274,
0.04199659824371338,
0.05540919303894043,
0.09707461297512054,
-0.012659505009651184,
-0.044131528586149216,
-0.008228492923080921,
0.006368562113493681,
-0.19353662431240082,
0.004679029807448387,
0.021942533552646637,
-0.017364224418997765,
0.19512680172920227,
-0.06143902614712715,
0.013739371672272682,
0.08509092032909393,
0.010240748524665833,
-0.08355776965618134,
-0.10257154703140259,
-0.024957984685897827,
-0.13927555084228516,
0.0699334591627121,
0.006741051096469164,
0.010840504430234432,
-0.1190219596028328,
0.04512733221054077,
0.01728152111172676,
-0.042007312178611755,
0.13446329534053802,
0.13304145634174347,
-0.07755990326404572,
0.12992282211780548,
-0.17481525242328644,
0.1020478755235672,
0.007404840085655451,
-0.05175318196415901,
-0.03404375910758972,
-0.05729566887021065,
0.025074930861592293,
0.09862540662288666,
-0.07638495415449142,
-0.06467418372631073,
-0.17462965846061707,
-0.04367845132946968,
-0.05285812169313431,
-0.05096468701958656,
-0.04809316247701645,
-0.04924866929650307,
-0.1492270529270172,
0.024790430441498756,
-0.07800315320491791,
0.13522569835186005,
0.04796833172440529,
-0.01872979663312435,
0.051338374614715576,
0.04231442138552666,
-0.02473120391368866,
0.08047520369291306,
-0.10030646622180939,
-0.0839252918958664
] |