sha (null) | last_modified (null) | library_name (stringclasses, 154 values) | text (stringlengths 1–900k) | metadata (stringlengths 2–348k) | pipeline_tag (stringclasses, 45 values) | id (stringlengths 5–122) | tags (listlengths 1–1.84k) | created_at (stringlengths 25) | arxiv (listlengths 0–201) | languages (listlengths 0–1.83k) | tags_str (stringlengths 17–9.34k) | text_str (stringlengths 0–389k) | text_lists (listlengths 0–722) | processed_texts (listlengths 1–723) | tokens_length (listlengths 1–723) | input_texts (listlengths 1–61) | embeddings (listlengths 768) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
null | null |
transformers
|
# Joey DialoGPT Model
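The card itself carries no usage example; below is a minimal one-turn inference sketch, assuming the standard DialoGPT chat pattern (the prompt string is illustrative, not from the card):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

# Sketch only: load the checkpoint named by this card.
tokenizer = AutoTokenizer.from_pretrained("houssaineamzil/DialoGPT-small-joey")
model = AutoModelForCausalLM.from_pretrained("houssaineamzil/DialoGPT-small-joey")

# Encode one user turn, terminated by the end-of-sequence token.
input_ids = tokenizer.encode("How you doin'?" + tokenizer.eos_token, return_tensors="pt")

# Generate the reply; pad with EOS since GPT-2 has no pad token.
reply_ids = model.generate(input_ids, max_length=200, pad_token_id=tokenizer.eos_token_id)

# Decode only the tokens generated after the prompt.
print(tokenizer.decode(reply_ids[0, input_ids.shape[-1]:], skip_special_tokens=True))
```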
|
{"tags": ["conversational"]}
|
text-generation
|
houssaineamzil/DialoGPT-small-joey
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Joey DialoGPT Model
|
[] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[
51
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n"
] |
[768-dimensional embedding vector omitted] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-base-timit-demo-colab
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4241
- Wer: 0.3381
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
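For reference, a minimal sketch of how the hyperparameters above would map onto `transformers.TrainingArguments`; the `output_dir` is illustrative (not from the card), and the Adam betas/epsilon are spelled out even though they are the library defaults:

```python
from transformers import TrainingArguments

# Sketch only: mirrors the hyperparameters listed in this card.
training_args = TrainingArguments(
    output_dir="wav2vec2-base-timit-demo-colab",  # illustrative name
    learning_rate=1e-4,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=1000,
    num_train_epochs=30,
    fp16=True,  # "Native AMP" mixed-precision training
)
```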
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 3.7749 | 4.0 | 500 | 2.0639 | 1.0018 |
| 0.9252 | 8.0 | 1000 | 0.4853 | 0.4821 |
| 0.3076 | 12.0 | 1500 | 0.4507 | 0.4044 |
| 0.1732 | 16.0 | 2000 | 0.4315 | 0.3688 |
| 0.1269 | 20.0 | 2500 | 0.4481 | 0.3559 |
| 0.1087 | 24.0 | 3000 | 0.4354 | 0.3464 |
| 0.0832 | 28.0 | 3500 | 0.4241 | 0.3381 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
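A minimal inference sketch for this checkpoint, assuming a local 16 kHz audio file (the file path is illustrative; decoding the file requires ffmpeg):

```python
from transformers import pipeline

# Sketch only: transcribe an audio file with this fine-tuned checkpoint.
asr = pipeline(
    "automatic-speech-recognition",
    model="hrdipto/wav2vec2-base-timit-demo-colab",
)
print(asr("sample.wav")["text"])  # "sample.wav" is an illustrative path
```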
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-base-timit-demo-colab", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-base-timit-demo-colab
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-base-timit-demo-colab
==============================
This model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.4241
* Wer: 0.3381
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0001
* train\_batch\_size: 32
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 1000
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
130,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[768-dimensional embedding vector omitted] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-300m-bangla-command-generated-data-finetune
This model is a fine-tuned version of [hrdipto/wav2vec2-xls-r-300m-bangla-command-data](https://huggingface.co/hrdipto/wav2vec2-xls-r-300m-bangla-command-data) on the None dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.0099
- eval_wer: 0.0208
- eval_runtime: 2.5526
- eval_samples_per_second: 75.217
- eval_steps_per_second: 9.402
- epoch: 71.43
- step: 2000
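
As a consistency check on these figures: 9.402 steps/s × 2.5526 s ≈ 24 evaluation steps, which at the eval_batch_size of 8 (listed below) covers 24 × 8 = 192 samples, matching 75.217 samples/s × 2.5526 s ≈ 192.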
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 100
- mixed_precision_training: Native AMP
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.11.0
|
{"tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-300m-bangla-command-generated-data-finetune", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-300m-bangla-command-generated-data-finetune
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #endpoints_compatible #region-us
|
# wav2vec2-xls-r-300m-bangla-command-generated-data-finetune
This model is a fine-tuned version of hrdipto/wav2vec2-xls-r-300m-bangla-command-data on the None dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.0099
- eval_wer: 0.0208
- eval_runtime: 2.5526
- eval_samples_per_second: 75.217
- eval_steps_per_second: 9.402
- epoch: 71.43
- step: 2000
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 100
- mixed_precision_training: Native AMP
### Framework versions
- Transformers 4.16.2
- Pytorch 1.10.0+cu111
- Datasets 1.18.3
- Tokenizers 0.11.0
|
[
"# wav2vec2-xls-r-300m-bangla-command-generated-data-finetune\n\nThis model is a fine-tuned version of hrdipto/wav2vec2-xls-r-300m-bangla-command-data on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0099\n- eval_wer: 0.0208\n- eval_runtime: 2.5526\n- eval_samples_per_second: 75.217\n- eval_steps_per_second: 9.402\n- epoch: 71.43\n- step: 2000",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 100\n- mixed_precision_training: Native AMP",
"### Framework versions\n\n- Transformers 4.16.2\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.11.0"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #endpoints_compatible #region-us \n",
"# wav2vec2-xls-r-300m-bangla-command-generated-data-finetune\n\nThis model is a fine-tuned version of hrdipto/wav2vec2-xls-r-300m-bangla-command-data on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0099\n- eval_wer: 0.0208\n- eval_runtime: 2.5526\n- eval_samples_per_second: 75.217\n- eval_steps_per_second: 9.402\n- epoch: 71.43\n- step: 2000",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 100\n- mixed_precision_training: Native AMP",
"### Framework versions\n\n- Transformers 4.16.2\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.11.0"
] |
[
48,
145,
6,
12,
8,
3,
117,
35
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #endpoints_compatible #region-us \n# wav2vec2-xls-r-300m-bangla-command-generated-data-finetune\n\nThis model is a fine-tuned version of hrdipto/wav2vec2-xls-r-300m-bangla-command-data on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0099\n- eval_wer: 0.0208\n- eval_runtime: 2.5526\n- eval_samples_per_second: 75.217\n- eval_steps_per_second: 9.402\n- epoch: 71.43\n- step: 2000## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 100\n- mixed_precision_training: Native AMP### Framework versions\n\n- Transformers 4.16.2\n- Pytorch 1.10.0+cu111\n- Datasets 1.18.3\n- Tokenizers 0.11.0"
] |
[768-dimensional embedding vector omitted]
0.07360761612653732,
-0.00449328450486064,
-0.010844642296433449,
-0.09238999336957932,
-0.04499710351228714,
0.08713198453187943,
0.03379260376095772,
0.10452748090028763,
-0.033441513776779175,
0.003055780427530408,
0.019103936851024628,
0.01401175931096077,
-0.09349226951599121,
0.005341313313692808,
0.017305102199316025,
0.028805170208215714,
0.048398811370134354,
0.06375601142644882,
0.023024752736091614,
-0.046188466250896454,
0.25340700149536133,
-0.05096473544836044,
-0.06073868274688721,
-0.13014476001262665,
0.10926280915737152,
0.06837662309408188,
0.0074129244312644005,
0.06578731536865234,
-0.11764572560787201,
0.006314310245215893,
0.11683709174394608,
0.07984631508588791,
-0.005423134658485651,
-0.005824062507599592,
-0.007309050299227238,
-0.011682350188493729,
-0.05958803743124008,
0.06723155826330185,
0.09301003813743591,
-0.040789734572172165,
-0.04670441150665283,
0.03541874513030052,
0.007307141553610563,
-0.06234432011842728,
-0.06637298315763474,
0.07079554349184036,
-0.02335529401898384,
0.0256500244140625,
-0.024794377386569977,
0.08903494477272034,
0.034162089228630066,
-0.2521308362483978,
0.06218671426177025,
-0.16604529321193695,
-0.1824655681848526,
-0.00968905258923769,
0.08654364198446274,
-0.006397496443241835,
0.0437953844666481,
0.012999096885323524,
-0.018554754555225372,
0.17447201907634735,
-0.002644481835886836,
-0.049970969557762146,
-0.11513254046440125,
0.0828959047794342,
-0.06095777451992035,
0.25131353735923767,
-0.007172423880547285,
0.07534709572792053,
0.08697021752595901,
0.004958732053637505,
-0.149287149310112,
0.035023678094148636,
0.0839218944311142,
-0.03989195451140404,
0.05671415850520134,
0.1870889514684677,
-0.06192475184798241,
0.12303794920444489,
0.06348288804292679,
-0.1334395408630371,
-0.023496676236391068,
-0.013204537332057953,
0.018869176506996155,
-0.09138365089893341,
-0.002069182926788926,
-0.04394879192113876,
0.14696940779685974,
0.1738170087337494,
-0.05236386880278587,
-0.01278250478208065,
-0.08299519121646881,
0.01812518760561943,
0.04308062046766281,
0.1268271952867508,
-0.009213418699800968,
-0.1979709267616272,
0.02608715370297432,
0.010535134933888912,
0.05605803057551384,
-0.23933526873588562,
-0.10982687771320343,
0.07320735603570938,
-0.06551720201969147,
-0.0008780804928392172,
0.09902733564376831,
0.06705885380506516,
0.011579443700611591,
-0.03897972032427788,
-0.15363191068172455,
-0.025140687823295593,
0.14247441291809082,
-0.15949136018753052,
-0.03194015845656395
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-tf-left-right-shuru-word-level
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 1.0504
- Wer: 0.6859
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a `TrainingArguments` sketch follows the list):
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 100
- mixed_precision_training: Native AMP
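
As a configuration sketch (not the exact training script, which is not included here), these values map directly onto `transformers.TrainingArguments`; the `output_dir` name below is an assumption:

```python
from transformers import TrainingArguments

# Hedged reconstruction of the listed hyperparameters; only `output_dir` is invented.
training_args = TrainingArguments(
    output_dir="wav2vec2-xls-r-tf-left-right-shuru-word-level",  # assumed name
    learning_rate=1e-4,
    per_device_train_batch_size=32,
    per_device_eval_batch_size=8,
    seed=42,
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="linear",
    warmup_steps=1000,
    num_train_epochs=100,
    fp16=True,  # "Native AMP" mixed-precision training
)
```
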
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 23.217 | 23.81 | 500 | 1.3437 | 0.6859 |
| 1.1742 | 47.62 | 1000 | 1.0397 | 0.6859 |
| 1.0339 | 71.43 | 1500 | 1.0155 | 0.6859 |
| 0.9909 | 95.24 | 2000 | 1.0504 | 0.6859 |
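
Wer in the table above is word error rate: (substitutions + insertions + deletions) divided by the number of reference words, so a fully garbled transcript can exceed 1.0. A minimal sketch of computing it with the `datasets` version pinned below (the strings are hypothetical, not from this run):

```python
from datasets import load_metric  # Datasets 1.13.x API; requires `jiwer`

wer_metric = load_metric("wer")
predictions = ["left", "right"]   # hypothetical model outputs
references = ["left", "left"]     # hypothetical ground truth
print(wer_metric.compute(predictions=predictions, references=references))  # 0.5
```
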
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-tf-left-right-shuru-word-level", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-tf-left-right-shuru-word-level
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-xls-r-tf-left-right-shuru-word-level
=============================================
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 1.0504
* Wer: 0.6859
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0001
* train\_batch\_size: 32
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 1000
* num\_epochs: 100
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
130,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.10954299569129944,
0.09873781353235245,
-0.0032914397306740284,
0.06422559171915054,
0.10840313881635666,
-0.020404495298862457,
0.1281270831823349,
0.15050694346427917,
-0.08948522806167603,
0.07442682236433029,
0.12633514404296875,
0.15167510509490967,
0.042937442660331726,
0.147494375705719,
-0.049959663301706314,
-0.2820800840854645,
0.04651576653122902,
0.03578434884548187,
-0.011033759452402592,
0.12720680236816406,
0.08339891582727432,
-0.12459003925323486,
0.05771542340517044,
0.03330190107226372,
-0.15850941836833954,
-0.004401085432618856,
-0.003849382046610117,
-0.10542039573192596,
0.12404133379459381,
0.004830718971788883,
0.07034512609243393,
0.048078637570142746,
0.06645245105028152,
-0.21960623562335968,
0.0066198622807860374,
0.04384505748748779,
0.02912098914384842,
0.07397374510765076,
0.05658084526658058,
-0.02874011918902397,
0.10129309445619583,
-0.07366795092821121,
0.08070402592420578,
0.03742528334259987,
-0.10602883994579315,
-0.29187870025634766,
-0.08709555864334106,
0.047631777822971344,
0.069968082010746,
0.08818896859884262,
-0.012929998338222504,
0.1432967483997345,
-0.05365323647856712,
0.11026857793331146,
0.28164875507354736,
-0.3138352036476135,
-0.04463854432106018,
-0.038351621478796005,
0.057463955134153366,
0.0600416474044323,
-0.09976568073034286,
-0.016860274598002434,
0.015416222624480724,
0.04476551339030266,
0.13859786093235016,
-0.016611158847808838,
-0.06146911904215813,
-0.007107859943062067,
-0.1479564905166626,
-0.05989835038781166,
0.11518239974975586,
0.0238510649651289,
-0.040472157299518585,
-0.0991148129105568,
-0.05493372306227684,
-0.21565939486026764,
-0.06793393194675446,
-0.01748397760093212,
0.043251726776361465,
-0.0424342043697834,
-0.10603396594524384,
-0.01101229339838028,
-0.06698963791131973,
-0.07462633401155472,
-0.04081804305315018,
0.1883065402507782,
0.05627438798546791,
-0.0017432107124477625,
-0.037408724427223206,
0.07680433988571167,
-0.022072777152061462,
-0.13786649703979492,
-0.023677462711930275,
0.0359673835337162,
-0.022386498749256134,
-0.015721648931503296,
-0.04241546615958214,
-0.059575241059064865,
0.02209477499127388,
0.16200461983680725,
-0.10040822625160217,
0.09642519056797028,
-0.018587378785014153,
0.039569173008203506,
-0.10244667530059814,
0.20787395536899567,
-0.04140487313270569,
0.015610133297741413,
-0.00963031966239214,
0.055843010544776917,
0.029491767287254333,
-0.02689044363796711,
-0.09504249691963196,
0.030841147527098656,
0.12314575165510178,
0.04651401937007904,
-0.04924074932932854,
0.06541424244642258,
-0.03411976620554924,
-0.009045064449310303,
-0.0020527690649032593,
-0.11186003684997559,
0.0366465225815773,
0.018929392099380493,
-0.06388428062200546,
0.004663754254579544,
0.013141433708369732,
0.007901394739747047,
-0.05453629046678543,
0.08279645442962646,
-0.06143476441502571,
0.03322938457131386,
-0.05721372365951538,
-0.12637211382389069,
0.025030886754393578,
-0.11678270995616913,
-0.004166305996477604,
-0.10063190758228302,
-0.09893867373466492,
-0.012530773878097534,
0.037305038422346115,
-0.03782259300351143,
-0.025493193417787552,
-0.07945683598518372,
-0.09107675403356552,
0.044905539602041245,
-0.03448178619146347,
0.07106243818998337,
-0.0742640420794487,
0.09402590990066528,
0.032980214804410934,
0.08860070258378983,
-0.014222322031855583,
0.06009787321090698,
-0.07107154279947281,
0.027142338454723358,
-0.19976618885993958,
0.07588707655668259,
-0.08786968886852264,
0.05833371356129646,
-0.12463777512311935,
-0.11377465724945068,
0.02130008675158024,
-0.006651066243648529,
0.09843911975622177,
0.09864544868469238,
-0.17343193292617798,
-0.08864743262529373,
0.2097746729850769,
-0.08163071423768997,
-0.08526843041181564,
0.12524977326393127,
-0.025563834235072136,
0.0007085074321366847,
0.05782746896147728,
0.2578134834766388,
0.04450339451432228,
-0.126304030418396,
0.007286184933036566,
-0.040850430727005005,
0.04321899265050888,
-0.03581277281045914,
0.05692686140537262,
-0.0282380860298872,
0.06803422421216965,
0.0179448239505291,
-0.0029752578120678663,
0.03754488378763199,
-0.08695269376039505,
-0.07732238620519638,
-0.0438486747443676,
-0.07849600166082382,
0.029588140547275543,
0.03242886811494827,
0.06447744369506836,
-0.11712539196014404,
-0.10780653357505798,
0.036879755556583405,
0.08001743257045746,
-0.10422737896442413,
0.07193200290203094,
-0.1206674724817276,
0.08479741960763931,
-0.014803782105445862,
-0.00472646439447999,
-0.18997853994369507,
0.037542033940553665,
0.03895549848675728,
-0.028763044625520706,
0.039093900471925735,
-0.06492841988801956,
0.07831571251153946,
0.0455205962061882,
-0.025972653180360794,
-0.04658856615424156,
-0.00902535766363144,
0.011431830935180187,
-0.09100779891014099,
-0.2054673284292221,
-0.03821024298667908,
-0.038168247789144516,
0.07931296527385712,
-0.13768185675144196,
0.034569498151540756,
0.07677564769983292,
0.09135384112596512,
0.03283943608403206,
-0.03209216892719269,
-0.0011433346662670374,
0.08997760713100433,
-0.02044878713786602,
-0.06446640938520432,
0.057467252016067505,
0.019382517784833908,
-0.0877213254570961,
0.037327561527490616,
-0.14881162345409393,
0.1265646070241928,
0.14716987311840057,
-0.014981868676841259,
-0.06841389089822769,
-0.0003773509815800935,
-0.046610649675130844,
-0.03420918062329292,
-0.004915814381092787,
0.03165717050433159,
0.215254008769989,
0.015380959957838058,
0.14271073043346405,
-0.08854681998491287,
-0.042830370366573334,
0.05004410818219185,
-0.021554579958319664,
-0.005497281439602375,
0.1166505515575409,
0.04522931948304176,
-0.05383561551570892,
0.11893770843744278,
0.09066524356603622,
-0.07976048439741135,
0.11864036321640015,
-0.060464076697826385,
-0.07414653897285461,
-0.020772606134414673,
0.005081075243651867,
0.023295555263757706,
0.09780607372522354,
-0.1640302836894989,
-0.04083320125937462,
0.02547384984791279,
0.025053782388567924,
0.01866116002202034,
-0.20824767649173737,
0.013832724653184414,
0.02837979793548584,
-0.08540216833353043,
-0.042559292167425156,
0.00245524849742651,
0.012299752794206142,
0.0940241813659668,
0.011955509893596172,
-0.09411153942346573,
0.010935957543551922,
0.0038194824010133743,
-0.07323139905929565,
0.1762324869632721,
-0.11563840508460999,
-0.1757233738899231,
-0.1042800173163414,
-0.09177547693252563,
-0.03876212239265442,
-0.002036147750914097,
0.08858535438776016,
-0.09243367612361908,
-0.03852825611829758,
-0.08446145057678223,
-0.015602856874465942,
-0.02577873505651951,
0.04237023741006851,
0.030467476695775986,
-0.012013492174446583,
0.06259190291166306,
-0.1164546087384224,
-0.022455401718616486,
-0.04039354249835014,
-0.001648631994612515,
0.05531787499785423,
0.03649210184812546,
0.10873953253030777,
0.1595853567123413,
-0.010676936246454716,
0.051155924797058105,
-0.04613202065229416,
0.18863095343112946,
-0.07514575868844986,
-0.03574934974312782,
0.10947290062904358,
-0.005132707301527262,
0.06845349073410034,
0.11863085627555847,
0.048924919217824936,
-0.09844417124986649,
-0.013117431662976742,
0.003806079737842083,
-0.045474790036678314,
-0.21340511739253998,
-0.033113978803157806,
-0.04456606134772301,
-0.0018143982160836458,
0.10627160221338272,
0.04060843586921692,
0.03797546401619911,
0.02357885241508484,
0.0330607108771801,
0.005766916088759899,
0.003448293311521411,
0.09625225514173508,
0.12913210690021515,
0.03935186564922333,
0.13320937752723694,
-0.036888789385557175,
-0.038714535534381866,
0.029400544241070747,
0.0046701314859092236,
0.2331010401248932,
0.020757542923092842,
0.19061440229415894,
0.05574365332722664,
0.1753501147031784,
0.041200362145900726,
0.06820358335971832,
-0.001597998314537108,
-0.010735834017395973,
0.010841122828423977,
-0.05208582058548927,
-0.03968871384859085,
0.023704299703240395,
0.02439628168940544,
0.009039584547281265,
-0.11376696825027466,
-0.013972120359539986,
0.046557970345020294,
0.3522767424583435,
0.028853682801127434,
-0.33673685789108276,
-0.08915157616138458,
-0.011283766478300095,
-0.08630307018756866,
-0.03059733472764492,
0.04531724005937576,
0.08898455649614334,
-0.08186520636081696,
0.06375271081924438,
-0.06276282668113708,
0.09019862860441208,
-0.06527306139469147,
0.033736422657966614,
0.035415928810834885,
0.07049499452114105,
0.003382492810487747,
0.032853864133358,
-0.29259711503982544,
0.2807064950466156,
0.004650570917874575,
0.0781450942158699,
-0.061624206602573395,
0.008002051152288914,
0.02557320147752762,
0.01668386347591877,
0.08807667344808578,
-0.025891076773405075,
-0.12134092301130295,
-0.1775466948747635,
-0.09239742904901505,
0.011056709103286266,
0.12792247533798218,
0.012478110380470753,
0.1106824055314064,
-0.010452601127326488,
-0.01659495197236538,
0.049141205847263336,
-0.09398899227380753,
-0.06534826010465622,
-0.09186127781867981,
0.010524587705731392,
0.08282053470611572,
0.03623576834797859,
-0.0721360296010971,
-0.10316373407840729,
-0.08822479099035263,
0.14760346710681915,
-0.05390321835875511,
-0.04336781054735184,
-0.11802789568901062,
0.007839902304112911,
0.11014439165592194,
-0.07925088703632355,
0.06107473745942116,
0.009906571358442307,
0.10448598116636276,
0.010325845330953598,
-0.06722518056631088,
0.11965961754322052,
-0.06345878541469574,
-0.16715823113918304,
-0.029435785487294197,
0.14541040360927582,
0.030541667714715004,
0.06024562940001488,
-0.006741285789757967,
0.03873484209179878,
-0.021533163264393806,
-0.0777856782078743,
0.04143873602151871,
0.027503248304128647,
0.04470732435584068,
-0.014206839725375175,
-0.020286962389945984,
-0.005458523984998465,
-0.092143215239048,
-0.017812080681324005,
0.20657967031002045,
0.2433132529258728,
-0.09671036899089813,
0.09242824465036392,
0.07054515182971954,
-0.04207838699221611,
-0.17109502851963043,
-0.005364830605685711,
0.06509749591350555,
0.00029050654848106205,
-0.025419609621167183,
-0.19329196214675903,
0.024708323180675507,
0.07068528980016708,
-0.020678779110312462,
0.08496245741844177,
-0.31854888796806335,
-0.14039675891399384,
0.1382562816143036,
0.11448084563016891,
0.06179714947938919,
-0.14635130763053894,
-0.05542339012026787,
-0.011684753932058811,
-0.10281585901975632,
0.09466679394245148,
-0.07480227202177048,
0.13611441850662231,
-0.023239154368638992,
0.09094065427780151,
0.011861932463943958,
-0.05805433914065361,
0.10578307509422302,
0.014349930919706821,
0.060503460466861725,
-0.04615020379424095,
0.016661815345287323,
0.04746484383940697,
-0.06291206181049347,
0.05584972724318504,
-0.08041075617074966,
0.02704249881207943,
-0.07878268510103226,
-0.033330343663692474,
-0.08450663834810257,
0.014150233939290047,
-0.009041558019816875,
-0.03433900326490402,
-0.037647053599357605,
0.0012825436424463987,
0.06266023218631744,
-0.010874048806726933,
0.15425506234169006,
-0.026982998475432396,
0.12771837413311005,
0.1612652689218521,
0.10134720057249069,
-0.10507036000490189,
-0.07794497162103653,
0.006444824859499931,
-0.0353928841650486,
0.05527971312403679,
-0.114251047372818,
0.036970868706703186,
0.13586045801639557,
0.0317814014852047,
0.1230761855840683,
0.07006236910820007,
-0.06595906615257263,
0.03336332365870476,
0.041813239455223083,
-0.13666492700576782,
-0.12745165824890137,
0.014391975477337837,
0.021960584446787834,
-0.07239434123039246,
0.07329856604337692,
0.11538799107074738,
-0.055434390902519226,
-0.014538846909999847,
-0.002790429862216115,
0.01461660023778677,
-0.04043827950954437,
0.19656917452812195,
0.036879219114780426,
0.061267055571079254,
-0.12445112317800522,
0.08026530593633652,
0.03825094550848007,
-0.13416461646556854,
0.06092957779765129,
0.10449139773845673,
-0.09554275870323181,
-0.028595883399248123,
0.028480472043156624,
0.11316141486167908,
-0.02681661583483219,
-0.07262279093265533,
-0.14142650365829468,
-0.14410068094730377,
0.10833471268415451,
0.20469264686107635,
0.056105438619852066,
0.01793164201080799,
-0.059262488037347794,
0.016316000372171402,
-0.1177837997674942,
0.06858782470226288,
0.04270758479833603,
0.059764523059129715,
-0.12803928554058075,
0.14716140925884247,
0.01723911054432392,
0.03987022116780281,
-0.014985362999141216,
-0.011578728444874287,
-0.11220239102840424,
0.04025993496179581,
-0.12759321928024292,
0.0053183394484221935,
-0.06666240096092224,
0.0005802881787531078,
0.004140312317758799,
-0.049399010837078094,
-0.06345752626657486,
0.03355974704027176,
-0.12033090740442276,
-0.022954951971769333,
0.0010879815090447664,
0.03561911731958389,
-0.12854722142219543,
-0.010034811682999134,
0.014622929506003857,
-0.09445216506719589,
0.09760427474975586,
0.08662255853414536,
-0.03380337730050087,
0.050895851105451584,
-0.06241556629538536,
-0.024912232533097267,
0.07826048880815506,
-0.006731427740305662,
0.05049201846122742,
-0.13120625913143158,
-0.01891012117266655,
0.010861457325518131,
0.03546803444623947,
0.02475726418197155,
0.11155343800783157,
-0.11613568663597107,
-0.0007936620968393981,
-0.027263224124908447,
-0.05233846604824066,
-0.06935308873653412,
0.049911659210920334,
0.11069650948047638,
0.02790077216923237,
0.16516898572444916,
-0.0933993011713028,
0.028691140934824944,
-0.16638906300067902,
0.006468876264989376,
-0.014837069436907768,
-0.12160038948059082,
-0.05011274665594101,
-0.032570239156484604,
0.07884865999221802,
-0.0631338506937027,
0.13064607977867126,
-0.03075088933110237,
0.025161447003483772,
0.036469586193561554,
-0.07828015089035034,
-0.0527469776570797,
0.040124084800481796,
0.20741739869117737,
0.03956456482410431,
-0.044178880751132965,
0.07282374054193497,
0.021301409229636192,
0.08074010163545609,
0.12784172594547272,
0.17232707142829895,
0.15909621119499207,
0.062180954962968826,
0.11706854403018951,
0.05342555046081543,
-0.05187973380088806,
-0.17054444551467896,
0.09200920164585114,
-0.06030571088194847,
0.12936437129974365,
-0.014301195740699768,
0.24417847394943237,
0.1201138123869896,
-0.15352948009967804,
0.06636855751276016,
-0.018945807591080666,
-0.0899410992860794,
-0.11638960242271423,
-0.06593874096870422,
-0.08715374767780304,
-0.17543204128742218,
0.009455111809074879,
-0.10179425776004791,
0.06203227862715721,
0.04726257175207138,
0.038152050226926804,
0.016367249190807343,
0.13674096763134003,
0.01531605701893568,
0.0026255035772919655,
0.09254135191440582,
-0.003501188475638628,
-0.05687446519732475,
-0.07224809378385544,
-0.08517061173915863,
0.03454108536243439,
-0.013258512131869793,
0.05836020037531853,
-0.003164408029988408,
-0.07028627395629883,
0.04676789417862892,
-0.0390552394092083,
-0.09649781137704849,
0.02226269245147705,
0.020778264850378036,
0.07001485675573349,
0.05092121288180351,
0.035551175475120544,
-0.04264792427420616,
-0.002246784046292305,
0.19426937401294708,
-0.09474879503250122,
-0.09459006786346436,
-0.1095619946718216,
0.2517842948436737,
0.04015745967626572,
-0.015760254114866257,
0.020758552476763725,
-0.060395412147045135,
-0.031212205067276955,
0.2123595029115677,
0.17211712896823883,
-0.010134766809642315,
0.004089081659913063,
-0.014844562858343124,
-0.006620477419346571,
-0.03648579493165016,
0.07974691689014435,
0.14597897231578827,
0.06169988587498665,
-0.06308671087026596,
-0.04906245321035385,
-0.05028046667575836,
-0.035623591393232346,
-0.06767010688781738,
0.07728166878223419,
0.0058279335498809814,
-0.024887727573513985,
-0.04564166069030762,
0.06562773138284683,
-0.09434399008750916,
-0.08332283794879913,
0.0257792379707098,
-0.19353632628917694,
-0.1487489640712738,
0.007178751286119223,
0.06937839835882187,
0.012977039441466331,
0.03497197851538658,
0.0036974602844566107,
-0.00788669940084219,
0.08064007014036179,
-0.0013644119026139379,
-0.08130128681659698,
-0.0664898157119751,
0.08444055914878845,
-0.13308702409267426,
0.16576837003231049,
-0.0416889451444149,
0.0483836755156517,
0.1232990100979805,
0.08817929774522781,
-0.07881080359220505,
0.08898291736841202,
0.0424179844558239,
-0.10575871169567108,
0.023239770904183388,
0.15274737775325775,
-0.03320731222629547,
0.09310232102870941,
0.03045591153204441,
-0.11389287561178207,
0.015381242148578167,
-0.0909588634967804,
-0.037897828966379166,
-0.04307923465967178,
-0.04908560588955879,
-0.04397739842534065,
0.10986648499965668,
0.16293127834796906,
-0.043940071016550064,
0.004727006424218416,
-0.05286566913127899,
0.010922370478510857,
0.04697117581963539,
-0.004585982766002417,
-0.06245831400156021,
-0.27924230694770813,
0.011095499619841576,
0.03904192894697189,
0.0024522216990590096,
-0.2558799386024475,
-0.09656772017478943,
0.01269526593387127,
-0.04261689633131027,
-0.08763118088245392,
0.08571159094572067,
0.07505708187818527,
0.04689347743988037,
-0.05196138471364975,
-0.057690251618623734,
-0.03496808931231499,
0.1900923252105713,
-0.17577718198299408,
-0.05937637761235237
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-tf-left-right-shuru
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.0921
- Wer: 1.2628
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (an optimizer sketch follows the list):
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 100
- mixed_precision_training: Native AMP
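
The optimizer line above corresponds to the following raw-PyTorch construction; this is a fragment, and `model` is assumed to be the `Wav2Vec2ForCTC` instance being fine-tuned:

```python
import torch

# Restates the listed optimizer settings; `model` is an assumed in-scope variable.
optimizer = torch.optim.Adam(
    model.parameters(),
    lr=1e-4,
    betas=(0.9, 0.999),
    eps=1e-8,
)
```
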
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 6.5528 | 23.81 | 500 | 0.5509 | 1.9487 |
| 0.2926 | 47.62 | 1000 | 0.1306 | 1.2756 |
| 0.1171 | 71.43 | 1500 | 0.1189 | 1.2628 |
| 0.0681 | 95.24 | 2000 | 0.0921 | 1.2628 |
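
Given the final checkpoint in the table above, a plausible inference sketch (assuming this repository is public and audio is sampled at 16 kHz; the input here is one second of silence purely for illustration):

```python
import numpy as np
import torch
from transformers import Wav2Vec2ForCTC, Wav2Vec2Processor

repo = "hrdipto/wav2vec2-xls-r-tf-left-right-shuru"
processor = Wav2Vec2Processor.from_pretrained(repo)
model = Wav2Vec2ForCTC.from_pretrained(repo)

speech = np.zeros(16_000, dtype=np.float32)  # placeholder 1 s of 16 kHz audio
inputs = processor(speech, sampling_rate=16_000, return_tensors="pt", padding=True)
with torch.no_grad():
    logits = model(inputs.input_values).logits
pred_ids = torch.argmax(logits, dim=-1)
print(processor.batch_decode(pred_ids))
```
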
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-tf-left-right-shuru", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-tf-left-right-shuru
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-xls-r-tf-left-right-shuru
==================================
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.0921
* Wer: 1.2628
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0001
* train\_batch\_size: 32
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 1000
* num\_epochs: 100
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
130,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 32\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 100\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.10954299569129944,
0.09873781353235245,
-0.0032914397306740284,
0.06422559171915054,
0.10840313881635666,
-0.020404495298862457,
0.1281270831823349,
0.15050694346427917,
-0.08948522806167603,
0.07442682236433029,
0.12633514404296875,
0.15167510509490967,
0.042937442660331726,
0.147494375705719,
-0.049959663301706314,
-0.2820800840854645,
0.04651576653122902,
0.03578434884548187,
-0.011033759452402592,
0.12720680236816406,
0.08339891582727432,
-0.12459003925323486,
0.05771542340517044,
0.03330190107226372,
-0.15850941836833954,
-0.004401085432618856,
-0.003849382046610117,
-0.10542039573192596,
0.12404133379459381,
0.004830718971788883,
0.07034512609243393,
0.048078637570142746,
0.06645245105028152,
-0.21960623562335968,
0.0066198622807860374,
0.04384505748748779,
0.02912098914384842,
0.07397374510765076,
0.05658084526658058,
-0.02874011918902397,
0.10129309445619583,
-0.07366795092821121,
0.08070402592420578,
0.03742528334259987,
-0.10602883994579315,
-0.29187870025634766,
-0.08709555864334106,
0.047631777822971344,
0.069968082010746,
0.08818896859884262,
-0.012929998338222504,
0.1432967483997345,
-0.05365323647856712,
0.11026857793331146,
0.28164875507354736,
-0.3138352036476135,
-0.04463854432106018,
-0.038351621478796005,
0.057463955134153366,
0.0600416474044323,
-0.09976568073034286,
-0.016860274598002434,
0.015416222624480724,
0.04476551339030266,
0.13859786093235016,
-0.016611158847808838,
-0.06146911904215813,
-0.007107859943062067,
-0.1479564905166626,
-0.05989835038781166,
0.11518239974975586,
0.0238510649651289,
-0.040472157299518585,
-0.0991148129105568,
-0.05493372306227684,
-0.21565939486026764,
-0.06793393194675446,
-0.01748397760093212,
0.043251726776361465,
-0.0424342043697834,
-0.10603396594524384,
-0.01101229339838028,
-0.06698963791131973,
-0.07462633401155472,
-0.04081804305315018,
0.1883065402507782,
0.05627438798546791,
-0.0017432107124477625,
-0.037408724427223206,
0.07680433988571167,
-0.022072777152061462,
-0.13786649703979492,
-0.023677462711930275,
0.0359673835337162,
-0.022386498749256134,
-0.015721648931503296,
-0.04241546615958214,
-0.059575241059064865,
0.02209477499127388,
0.16200461983680725,
-0.10040822625160217,
0.09642519056797028,
-0.018587378785014153,
0.039569173008203506,
-0.10244667530059814,
0.20787395536899567,
-0.04140487313270569,
0.015610133297741413,
-0.00963031966239214,
0.055843010544776917,
0.029491767287254333,
-0.02689044363796711,
-0.09504249691963196,
0.030841147527098656,
0.12314575165510178,
0.04651401937007904,
-0.04924074932932854,
0.06541424244642258,
-0.03411976620554924,
-0.009045064449310303,
-0.0020527690649032593,
-0.11186003684997559,
0.0366465225815773,
0.018929392099380493,
-0.06388428062200546,
0.004663754254579544,
0.013141433708369732,
0.007901394739747047,
-0.05453629046678543,
0.08279645442962646,
-0.06143476441502571,
0.03322938457131386,
-0.05721372365951538,
-0.12637211382389069,
0.025030886754393578,
-0.11678270995616913,
-0.004166305996477604,
-0.10063190758228302,
-0.09893867373466492,
-0.012530773878097534,
0.037305038422346115,
-0.03782259300351143,
-0.025493193417787552,
-0.07945683598518372,
-0.09107675403356552,
0.044905539602041245,
-0.03448178619146347,
0.07106243818998337,
-0.0742640420794487,
0.09402590990066528,
0.032980214804410934,
0.08860070258378983,
-0.014222322031855583,
0.06009787321090698,
-0.07107154279947281,
0.027142338454723358,
-0.19976618885993958,
0.07588707655668259,
-0.08786968886852264,
0.05833371356129646,
-0.12463777512311935,
-0.11377465724945068,
0.02130008675158024,
-0.006651066243648529,
0.09843911975622177,
0.09864544868469238,
-0.17343193292617798,
-0.08864743262529373,
0.2097746729850769,
-0.08163071423768997,
-0.08526843041181564,
0.12524977326393127,
-0.025563834235072136,
0.0007085074321366847,
0.05782746896147728,
0.2578134834766388,
0.04450339451432228,
-0.126304030418396,
0.007286184933036566,
-0.040850430727005005,
0.04321899265050888,
-0.03581277281045914,
0.05692686140537262,
-0.0282380860298872,
0.06803422421216965,
0.0179448239505291,
-0.0029752578120678663,
0.03754488378763199,
-0.08695269376039505,
-0.07732238620519638,
-0.0438486747443676,
-0.07849600166082382,
0.029588140547275543,
0.03242886811494827,
0.06447744369506836,
-0.11712539196014404,
-0.10780653357505798,
0.036879755556583405,
0.08001743257045746,
-0.10422737896442413,
0.07193200290203094,
-0.1206674724817276,
0.08479741960763931,
-0.014803782105445862,
-0.00472646439447999,
-0.18997853994369507,
0.037542033940553665,
0.03895549848675728,
-0.028763044625520706,
0.039093900471925735,
-0.06492841988801956,
0.07831571251153946,
0.0455205962061882,
-0.025972653180360794,
-0.04658856615424156,
-0.00902535766363144,
0.011431830935180187,
-0.09100779891014099,
-0.2054673284292221,
-0.03821024298667908,
-0.038168247789144516,
0.07931296527385712,
-0.13768185675144196,
0.034569498151540756,
0.07677564769983292,
0.09135384112596512,
0.03283943608403206,
-0.03209216892719269,
-0.0011433346662670374,
0.08997760713100433,
-0.02044878713786602,
-0.06446640938520432,
0.057467252016067505,
0.019382517784833908,
-0.0877213254570961,
0.037327561527490616,
-0.14881162345409393,
0.1265646070241928,
0.14716987311840057,
-0.014981868676841259,
-0.06841389089822769,
-0.0003773509815800935,
-0.046610649675130844,
-0.03420918062329292,
-0.004915814381092787,
0.03165717050433159,
0.215254008769989,
0.015380959957838058,
0.14271073043346405,
-0.08854681998491287,
-0.042830370366573334,
0.05004410818219185,
-0.021554579958319664,
-0.005497281439602375,
0.1166505515575409,
0.04522931948304176,
-0.05383561551570892,
0.11893770843744278,
0.09066524356603622,
-0.07976048439741135,
0.11864036321640015,
-0.060464076697826385,
-0.07414653897285461,
-0.020772606134414673,
0.005081075243651867,
0.023295555263757706,
0.09780607372522354,
-0.1640302836894989,
-0.04083320125937462,
0.02547384984791279,
0.025053782388567924,
0.01866116002202034,
-0.20824767649173737,
0.013832724653184414,
0.02837979793548584,
-0.08540216833353043,
-0.042559292167425156,
0.00245524849742651,
0.012299752794206142,
0.0940241813659668,
0.011955509893596172,
-0.09411153942346573,
0.010935957543551922,
0.0038194824010133743,
-0.07323139905929565,
0.1762324869632721,
-0.11563840508460999,
-0.1757233738899231,
-0.1042800173163414,
-0.09177547693252563,
-0.03876212239265442,
-0.002036147750914097,
0.08858535438776016,
-0.09243367612361908,
-0.03852825611829758,
-0.08446145057678223,
-0.015602856874465942,
-0.02577873505651951,
0.04237023741006851,
0.030467476695775986,
-0.012013492174446583,
0.06259190291166306,
-0.1164546087384224,
-0.022455401718616486,
-0.04039354249835014,
-0.001648631994612515,
0.05531787499785423,
0.03649210184812546,
0.10873953253030777,
0.1595853567123413,
-0.010676936246454716,
0.051155924797058105,
-0.04613202065229416,
0.18863095343112946,
-0.07514575868844986,
-0.03574934974312782,
0.10947290062904358,
-0.005132707301527262,
0.06845349073410034,
0.11863085627555847,
0.048924919217824936,
-0.09844417124986649,
-0.013117431662976742,
0.003806079737842083,
-0.045474790036678314,
-0.21340511739253998,
-0.033113978803157806,
-0.04456606134772301,
-0.0018143982160836458,
0.10627160221338272,
0.04060843586921692,
0.03797546401619911,
0.02357885241508484,
0.0330607108771801,
0.005766916088759899,
0.003448293311521411,
0.09625225514173508,
0.12913210690021515,
0.03935186564922333,
0.13320937752723694,
-0.036888789385557175,
-0.038714535534381866,
0.029400544241070747,
0.0046701314859092236,
0.2331010401248932,
0.020757542923092842,
0.19061440229415894,
0.05574365332722664,
0.1753501147031784,
0.041200362145900726,
0.06820358335971832,
-0.001597998314537108,
-0.010735834017395973,
0.010841122828423977,
-0.05208582058548927,
-0.03968871384859085,
0.023704299703240395,
0.02439628168940544,
0.009039584547281265,
-0.11376696825027466,
-0.013972120359539986,
0.046557970345020294,
0.3522767424583435,
0.028853682801127434,
-0.33673685789108276,
-0.08915157616138458,
-0.011283766478300095,
-0.08630307018756866,
-0.03059733472764492,
0.04531724005937576,
0.08898455649614334,
-0.08186520636081696,
0.06375271081924438,
-0.06276282668113708,
0.09019862860441208,
-0.06527306139469147,
0.033736422657966614,
0.035415928810834885,
0.07049499452114105,
0.003382492810487747,
0.032853864133358,
-0.29259711503982544,
0.2807064950466156,
0.004650570917874575,
0.0781450942158699,
-0.061624206602573395,
0.008002051152288914,
0.02557320147752762,
0.01668386347591877,
0.08807667344808578,
-0.025891076773405075,
-0.12134092301130295,
-0.1775466948747635,
-0.09239742904901505,
0.011056709103286266,
0.12792247533798218,
0.012478110380470753,
0.1106824055314064,
-0.010452601127326488,
-0.01659495197236538,
0.049141205847263336,
-0.09398899227380753,
-0.06534826010465622,
-0.09186127781867981,
0.010524587705731392,
0.08282053470611572,
0.03623576834797859,
-0.0721360296010971,
-0.10316373407840729,
-0.08822479099035263,
0.14760346710681915,
-0.05390321835875511,
-0.04336781054735184,
-0.11802789568901062,
0.007839902304112911,
0.11014439165592194,
-0.07925088703632355,
0.06107473745942116,
0.009906571358442307,
0.10448598116636276,
0.010325845330953598,
-0.06722518056631088,
0.11965961754322052,
-0.06345878541469574,
-0.16715823113918304,
-0.029435785487294197,
0.14541040360927582,
0.030541667714715004,
0.06024562940001488,
-0.006741285789757967,
0.03873484209179878,
-0.021533163264393806,
-0.0777856782078743,
0.04143873602151871,
0.027503248304128647,
0.04470732435584068,
-0.014206839725375175,
-0.020286962389945984,
-0.005458523984998465,
-0.092143215239048,
-0.017812080681324005,
0.20657967031002045,
0.2433132529258728,
-0.09671036899089813,
0.09242824465036392,
0.07054515182971954,
-0.04207838699221611,
-0.17109502851963043,
-0.005364830605685711,
0.06509749591350555,
0.00029050654848106205,
-0.025419609621167183,
-0.19329196214675903,
0.024708323180675507,
0.07068528980016708,
-0.020678779110312462,
0.08496245741844177,
-0.31854888796806335,
-0.14039675891399384,
0.1382562816143036,
0.11448084563016891,
0.06179714947938919,
-0.14635130763053894,
-0.05542339012026787,
-0.011684753932058811,
-0.10281585901975632,
0.09466679394245148,
-0.07480227202177048,
0.13611441850662231,
-0.023239154368638992,
0.09094065427780151,
0.011861932463943958,
-0.05805433914065361,
0.10578307509422302,
0.014349930919706821,
0.060503460466861725,
-0.04615020379424095,
0.016661815345287323,
0.04746484383940697,
-0.06291206181049347,
0.05584972724318504,
-0.08041075617074966,
0.02704249881207943,
-0.07878268510103226,
-0.033330343663692474,
-0.08450663834810257,
0.014150233939290047,
-0.009041558019816875,
-0.03433900326490402,
-0.037647053599357605,
0.0012825436424463987,
0.06266023218631744,
-0.010874048806726933,
0.15425506234169006,
-0.026982998475432396,
0.12771837413311005,
0.1612652689218521,
0.10134720057249069,
-0.10507036000490189,
-0.07794497162103653,
0.006444824859499931,
-0.0353928841650486,
0.05527971312403679,
-0.114251047372818,
0.036970868706703186,
0.13586045801639557,
0.0317814014852047,
0.1230761855840683,
0.07006236910820007,
-0.06595906615257263,
0.03336332365870476,
0.041813239455223083,
-0.13666492700576782,
-0.12745165824890137,
0.014391975477337837,
0.021960584446787834,
-0.07239434123039246,
0.07329856604337692,
0.11538799107074738,
-0.055434390902519226,
-0.014538846909999847,
-0.002790429862216115,
0.01461660023778677,
-0.04043827950954437,
0.19656917452812195,
0.036879219114780426,
0.061267055571079254,
-0.12445112317800522,
0.08026530593633652,
0.03825094550848007,
-0.13416461646556854,
0.06092957779765129,
0.10449139773845673,
-0.09554275870323181,
-0.028595883399248123,
0.028480472043156624,
0.11316141486167908,
-0.02681661583483219,
-0.07262279093265533,
-0.14142650365829468,
-0.14410068094730377,
0.10833471268415451,
0.20469264686107635,
0.056105438619852066,
0.01793164201080799,
-0.059262488037347794,
0.016316000372171402,
-0.1177837997674942,
0.06858782470226288,
0.04270758479833603,
0.059764523059129715,
-0.12803928554058075,
0.14716140925884247,
0.01723911054432392,
0.03987022116780281,
-0.014985362999141216,
-0.011578728444874287,
-0.11220239102840424,
0.04025993496179581,
-0.12759321928024292,
0.0053183394484221935,
-0.06666240096092224,
0.0005802881787531078,
0.004140312317758799,
-0.049399010837078094,
-0.06345752626657486,
0.03355974704027176,
-0.12033090740442276,
-0.022954951971769333,
0.0010879815090447664,
0.03561911731958389,
-0.12854722142219543,
-0.010034811682999134,
0.014622929506003857,
-0.09445216506719589,
0.09760427474975586,
0.08662255853414536,
-0.03380337730050087,
0.050895851105451584,
-0.06241556629538536,
-0.024912232533097267,
0.07826048880815506,
-0.006731427740305662,
0.05049201846122742,
-0.13120625913143158,
-0.01891012117266655,
0.010861457325518131,
0.03546803444623947,
0.02475726418197155,
0.11155343800783157,
-0.11613568663597107,
-0.0007936620968393981,
-0.027263224124908447,
-0.05233846604824066,
-0.06935308873653412,
0.049911659210920334,
0.11069650948047638,
0.02790077216923237,
0.16516898572444916,
-0.0933993011713028,
0.028691140934824944,
-0.16638906300067902,
0.006468876264989376,
-0.014837069436907768,
-0.12160038948059082,
-0.05011274665594101,
-0.032570239156484604,
0.07884865999221802,
-0.0631338506937027,
0.13064607977867126,
-0.03075088933110237,
0.025161447003483772,
0.036469586193561554,
-0.07828015089035034,
-0.0527469776570797,
0.040124084800481796,
0.20741739869117737,
0.03956456482410431,
-0.044178880751132965,
0.07282374054193497,
0.021301409229636192,
0.08074010163545609,
0.12784172594547272,
0.17232707142829895,
0.15909621119499207,
0.062180954962968826,
0.11706854403018951,
0.05342555046081543,
-0.05187973380088806,
-0.17054444551467896,
0.09200920164585114,
-0.06030571088194847,
0.12936437129974365,
-0.014301195740699768,
0.24417847394943237,
0.1201138123869896,
-0.15352948009967804,
0.06636855751276016,
-0.018945807591080666,
-0.0899410992860794,
-0.11638960242271423,
-0.06593874096870422,
-0.08715374767780304,
-0.17543204128742218,
0.009455111809074879,
-0.10179425776004791,
0.06203227862715721,
0.04726257175207138,
0.038152050226926804,
0.016367249190807343,
0.13674096763134003,
0.01531605701893568,
0.0026255035772919655,
0.09254135191440582,
-0.003501188475638628,
-0.05687446519732475,
-0.07224809378385544,
-0.08517061173915863,
0.03454108536243439,
-0.013258512131869793,
0.05836020037531853,
-0.003164408029988408,
-0.07028627395629883,
0.04676789417862892,
-0.0390552394092083,
-0.09649781137704849,
0.02226269245147705,
0.020778264850378036,
0.07001485675573349,
0.05092121288180351,
0.035551175475120544,
-0.04264792427420616,
-0.002246784046292305,
0.19426937401294708,
-0.09474879503250122,
-0.09459006786346436,
-0.1095619946718216,
0.2517842948436737,
0.04015745967626572,
-0.015760254114866257,
0.020758552476763725,
-0.060395412147045135,
-0.031212205067276955,
0.2123595029115677,
0.17211712896823883,
-0.010134766809642315,
0.004089081659913063,
-0.014844562858343124,
-0.006620477419346571,
-0.03648579493165016,
0.07974691689014435,
0.14597897231578827,
0.06169988587498665,
-0.06308671087026596,
-0.04906245321035385,
-0.05028046667575836,
-0.035623591393232346,
-0.06767010688781738,
0.07728166878223419,
0.0058279335498809814,
-0.024887727573513985,
-0.04564166069030762,
0.06562773138284683,
-0.09434399008750916,
-0.08332283794879913,
0.0257792379707098,
-0.19353632628917694,
-0.1487489640712738,
0.007178751286119223,
0.06937839835882187,
0.012977039441466331,
0.03497197851538658,
0.0036974602844566107,
-0.00788669940084219,
0.08064007014036179,
-0.0013644119026139379,
-0.08130128681659698,
-0.0664898157119751,
0.08444055914878845,
-0.13308702409267426,
0.16576837003231049,
-0.0416889451444149,
0.0483836755156517,
0.1232990100979805,
0.08817929774522781,
-0.07881080359220505,
0.08898291736841202,
0.0424179844558239,
-0.10575871169567108,
0.023239770904183388,
0.15274737775325775,
-0.03320731222629547,
0.09310232102870941,
0.03045591153204441,
-0.11389287561178207,
0.015381242148578167,
-0.0909588634967804,
-0.037897828966379166,
-0.04307923465967178,
-0.04908560588955879,
-0.04397739842534065,
0.10986648499965668,
0.16293127834796906,
-0.043940071016550064,
0.004727006424218416,
-0.05286566913127899,
0.010922370478510857,
0.04697117581963539,
-0.004585982766002417,
-0.06245831400156021,
-0.27924230694770813,
0.011095499619841576,
0.03904192894697189,
0.0024522216990590096,
-0.2558799386024475,
-0.09656772017478943,
0.01269526593387127,
-0.04261689633131027,
-0.08763118088245392,
0.08571159094572067,
0.07505708187818527,
0.04689347743988037,
-0.05196138471364975,
-0.057690251618623734,
-0.03496808931231499,
0.1900923252105713,
-0.17577718198299408,
-0.05937637761235237
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-tf-left-right-trainer
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the None dataset.
It achieves the following results on the evaluation set (an evaluation sketch follows the list):
- eval_loss: 0.0090
- eval_wer: 0.0037
- eval_runtime: 11.2686
- eval_samples_per_second: 71.703
- eval_steps_per_second: 8.963
- epoch: 21.05
- step: 4000
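
These `eval_*` keys are the dictionary that `Trainer.evaluate()` returns; the fragment below sketches how such numbers are typically produced and logged (`trainer` is assumed to be the `Trainer` instance from the training script):

```python
# Fragment, not a standalone script: `trainer` is assumed to exist.
metrics = trainer.evaluate()
# e.g. {"eval_loss": 0.0090, "eval_wer": 0.0037, "eval_runtime": 11.2686,
#       "eval_samples_per_second": 71.703, "eval_steps_per_second": 8.963, ...}
trainer.log_metrics("eval", metrics)
trainer.save_metrics("eval", metrics)
```
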
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (a scheduler sketch follows the list):
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
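
Outside the Trainer, the scheduler lines above correspond to `get_linear_schedule_with_warmup`; the total-step count below is an assumption derived from this card (step 4000 at epoch 21.05 implies roughly 190 optimizer steps per epoch, so 30 epochs is about 5700 steps):

```python
from transformers import get_linear_schedule_with_warmup

# Fragment: `optimizer` is assumed to be the Adam optimizer listed above.
scheduler = get_linear_schedule_with_warmup(
    optimizer,
    num_warmup_steps=1000,    # lr_scheduler_warmup_steps
    num_training_steps=5700,  # ~190 steps/epoch * 30 epochs (assumed)
)
```
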
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-tf-left-right-trainer", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-tf-left-right-trainer
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
# wav2vec2-xls-r-tf-left-right-trainer
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.
It achieves the following results on the evaluation set:
- eval_loss: 0.0090
- eval_wer: 0.0037
- eval_runtime: 11.2686
- eval_samples_per_second: 71.703
- eval_steps_per_second: 8.963
- epoch: 21.05
- step: 4000
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0001
- train_batch_size: 32
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
[
"# wav2vec2-xls-r-tf-left-right-trainer\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0090\n- eval_wer: 0.0037\n- eval_runtime: 11.2686\n- eval_samples_per_second: 71.703\n- eval_steps_per_second: 8.963\n- epoch: 21.05\n- step: 4000",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"# wav2vec2-xls-r-tf-left-right-trainer\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0090\n- eval_wer: 0.0037\n- eval_runtime: 11.2686\n- eval_samples_per_second: 71.703\n- eval_steps_per_second: 8.963\n- epoch: 21.05\n- step: 4000",
"## Model description\n\nMore information needed",
"## Intended uses & limitations\n\nMore information needed",
"## Training and evaluation data\n\nMore information needed",
"## Training procedure",
"### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP",
"### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
56,
128,
6,
12,
8,
3,
117,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n# wav2vec2-xls-r-tf-left-right-trainer\n\nThis model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.\nIt achieves the following results on the evaluation set:\n- eval_loss: 0.0090\n- eval_wer: 0.0037\n- eval_runtime: 11.2686\n- eval_samples_per_second: 71.703\n- eval_steps_per_second: 8.963\n- epoch: 21.05\n- step: 4000## Model description\n\nMore information needed## Intended uses & limitations\n\nMore information needed## Training and evaluation data\n\nMore information needed## Training procedure### Training hyperparameters\n\nThe following hyperparameters were used during training:\n- learning_rate: 0.0001\n- train_batch_size: 32\n- eval_batch_size: 8\n- seed: 42\n- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n- lr_scheduler_type: linear\n- lr_scheduler_warmup_steps: 1000\n- num_epochs: 30\n- mixed_precision_training: Native AMP### Framework versions\n\n- Transformers 4.11.3\n- Pytorch 1.10.0+cu111\n- Datasets 1.13.3\n- Tokenizers 0.10.3"
] |
[
-0.08825577795505524,
0.08324992656707764,
-0.003999330103397369,
0.07499989867210388,
0.134449765086174,
0.01505287829786539,
0.09599533677101135,
0.12190377712249756,
-0.048422008752822876,
0.08081036806106567,
0.0598420649766922,
0.08071792125701904,
0.05978623405098915,
0.12308347970247269,
-0.05633215233683586,
-0.17608577013015747,
0.007873434573411942,
-0.04245750978589058,
-0.05211048945784569,
0.09621147066354752,
0.08918943256139755,
-0.10188202559947968,
0.05949259176850319,
0.013195911422371864,
-0.10976272076368332,
0.02096589095890522,
0.003398194443434477,
-0.0446702279150486,
0.11688785254955292,
0.030391298234462738,
0.09106869250535965,
0.018835658207535744,
0.10894127935171127,
-0.237391859292984,
-0.011842532083392143,
0.1006976068019867,
0.03136515989899635,
0.06882895529270172,
0.0993465855717659,
-0.004701378755271435,
0.06490414589643478,
-0.14380693435668945,
0.08963479846715927,
0.034934140741825104,
-0.13033843040466309,
-0.16636066138744354,
-0.10485219210386276,
0.039012931287288666,
0.11614750325679779,
0.11073633283376694,
-0.020847145467996597,
0.13233807682991028,
-0.07567255198955536,
0.0896051898598671,
0.22587932646274567,
-0.28818193078041077,
-0.057258062064647675,
0.018377799540758133,
0.04799243062734604,
0.032430749386548996,
-0.09644129127264023,
0.006736028473824263,
0.0028933726716786623,
0.027519945055246353,
0.11344725638628006,
-0.023059379309415817,
-0.08937787264585495,
0.0028380288276821375,
-0.11719435453414917,
-0.03693358600139618,
0.07477714121341705,
0.06814730912446976,
-0.04077576845884323,
-0.09265376627445221,
-0.07505238801240921,
-0.1143246442079544,
-0.004766192752867937,
-0.0487879142165184,
0.04189916327595711,
-0.03907845541834831,
-0.05312220752239227,
-0.016448838636279106,
-0.06891657412052155,
-0.046061206609010696,
0.0003079767047893256,
0.12080078572034836,
0.012229399755597115,
0.016612153500318527,
-0.03040022775530815,
0.10576605796813965,
0.0009485266637057066,
-0.13285453617572784,
-0.012130548246204853,
0.011436982080340385,
-0.14781327545642853,
-0.05751848220825195,
-0.05987953022122383,
-0.07256439328193665,
-0.029143260791897774,
0.18420934677124023,
-0.03648070991039276,
0.09313850849866867,
0.018337327986955643,
-0.00787493959069252,
-0.056503377854824066,
0.18084397912025452,
-0.04595506936311722,
-0.10691364109516144,
-0.03255264088511467,
0.08539498597383499,
-0.0016159701626747847,
-0.025358567014336586,
-0.03679235279560089,
0.010665742680430412,
0.057462625205516815,
0.037217289209365845,
-0.03337536379694939,
0.029456786811351776,
-0.06685468554496765,
-0.01751117780804634,
0.0074534048326313496,
-0.13214576244354248,
0.04846041649580002,
0.0031044434290379286,
-0.09618724882602692,
-0.03638846427202225,
0.03069283440709114,
0.009975014254450798,
-0.045515451580286026,
0.13929535448551178,
-0.06210709735751152,
0.007283052895218134,
-0.06853324919939041,
-0.09146209806203842,
-0.003893617307767272,
-0.06053183227777481,
-0.021766120567917824,
-0.06233462318778038,
-0.16571128368377686,
-0.0631340816617012,
0.06729307770729065,
-0.06533166021108627,
-0.010703256353735924,
-0.028247695416212082,
-0.05137176439166069,
0.03363019973039627,
-0.0243473369628191,
0.14944195747375488,
-0.0611189641058445,
0.05765722692012787,
0.0022478688042610884,
0.045793160796165466,
0.06765695661306381,
0.035598840564489365,
-0.06804077327251434,
0.033820025622844696,
-0.1344018578529358,
0.09692145138978958,
-0.07271712273359299,
0.013711751438677311,
-0.13616862893104553,
-0.07298916578292847,
0.008817912079393864,
-0.016148725524544716,
0.09098301082849503,
0.09273276478052139,
-0.18930990993976593,
-0.04022735729813576,
0.13879147171974182,
-0.05023065209388733,
-0.06587591767311096,
0.09448538720607758,
-0.035681307315826416,
0.005485852714627981,
0.05007543787360191,
0.1583968698978424,
0.0549234114587307,
-0.1599571257829666,
-0.01488501112908125,
-0.030813291668891907,
0.03396698087453842,
0.07780217379331589,
0.03542381897568703,
-0.02227885276079178,
0.10039021819829941,
-0.007097445894032717,
-0.07064186781644821,
-0.000671726418659091,
-0.0660843700170517,
-0.07854916900396347,
-0.0290802251547575,
-0.08016154915094376,
0.022482367232441902,
0.015491480007767677,
0.023906389251351357,
-0.06261217594146729,
-0.12297229468822479,
0.08569984883069992,
0.09860698878765106,
-0.03904498368501663,
0.024745821952819824,
-0.09663191437721252,
-0.004173811059445143,
0.028152121230959892,
-0.02200545370578766,
-0.2172480672597885,
-0.08986380696296692,
0.012630759738385677,
-0.1027318686246872,
0.01736854948103428,
0.03481799364089966,
0.07253874093294144,
0.06041926518082619,
-0.030731305480003357,
-0.024297388270497322,
-0.09112658351659775,
-0.00758494483307004,
-0.09722950309515,
-0.16505105793476105,
-0.06483004242181778,
-0.009407497942447662,
0.19941851496696472,
-0.19629354774951935,
-0.0012017360422760248,
0.0005127399927005172,
0.15065701305866241,
0.030962279066443443,
-0.06596136093139648,
-0.03356649726629257,
0.048929497599601746,
0.00749995606020093,
-0.0999567061662674,
0.037714142352342606,
-0.009348754771053791,
-0.07602265477180481,
-0.052450697869062424,
-0.1550413966178894,
0.02350756525993347,
0.08423440903425217,
0.022484872490167618,
-0.10404282808303833,
0.03927011042833328,
-0.05124745890498161,
-0.03946678712964058,
-0.08351165056228638,
-0.004950729664415121,
0.22048209607601166,
0.06746577471494675,
0.12296945601701736,
-0.02669038437306881,
-0.06425940990447998,
-0.0010101611260324717,
0.01138728205114603,
0.016892168670892715,
0.08951858431100845,
0.06173516437411308,
-0.11822515726089478,
0.04807444289326668,
0.08114679157733917,
-0.016521329060196877,
0.10522082448005676,
-0.023452846333384514,
-0.08196789026260376,
-0.04313381761312485,
-0.0036492731887847185,
0.018979040905833244,
0.09847576171159744,
-0.0622715950012207,
-0.0014582087751477957,
0.03256481885910034,
0.019344044849276543,
-0.0013479026965796947,
-0.17491434514522552,
0.004234886262565851,
0.040041569620370865,
-0.03603748977184296,
-0.00486872298642993,
-0.02953595481812954,
0.031165262684226036,
0.07268676906824112,
0.031953733414411545,
-0.03870736435055733,
-0.007334789261221886,
-0.03184885159134865,
-0.07729590684175491,
0.16383370757102966,
-0.08984322100877762,
-0.15106996893882751,
-0.1128704622387886,
-0.0035222829319536686,
-0.06292694061994553,
-0.013999111019074917,
0.03408139944076538,
-0.07939563691616058,
-0.0843958780169487,
-0.0862988755106926,
0.00502048572525382,
0.005712450481951237,
-0.01575484871864319,
0.07108035683631897,
-0.006689680740237236,
0.112139992415905,
-0.12932850420475006,
-0.002927336608991027,
-0.02051830105483532,
-0.05369634926319122,
0.007229841314256191,
0.09114264696836472,
0.08124784380197525,
0.11924607306718826,
0.00408561434596777,
0.02538156695663929,
-0.023487649857997894,
0.25965985655784607,
-0.09257251024246216,
0.011635608039796352,
0.13825488090515137,
0.005758468993008137,
0.06905139982700348,
0.11913147568702698,
0.036608535796403885,
-0.11583001911640167,
0.031740106642246246,
0.09177454560995102,
-0.012779375538229942,
-0.2367643415927887,
-0.03109116293489933,
-0.02585117518901825,
-0.06603433191776276,
0.10842163860797882,
0.022037677466869354,
-0.008044866845011711,
0.019152916967868805,
0.003943006042391062,
0.02860507369041443,
0.009924224577844143,
0.07049610465765,
0.09538928419351578,
0.06755688786506653,
0.11748266965150833,
-0.01008102111518383,
-0.0004492226871661842,
0.04921054467558861,
-0.02396659553050995,
0.20596392452716827,
-0.01439525093883276,
0.12376895546913147,
0.035508979111909866,
0.1320355385541916,
-0.03675932064652443,
0.035421498119831085,
0.030654149129986763,
-0.024393634870648384,
0.013639233075082302,
-0.05735986679792404,
-0.037071917206048965,
0.03672653064131737,
-0.016580386087298393,
-0.0037600742653012276,
-0.06368386745452881,
0.0490298792719841,
0.04276975989341736,
0.30032384395599365,
0.06773700565099716,
-0.2838314175605774,
-0.07115092873573303,
-0.0013736599357798696,
-0.03984224796295166,
-0.05543148145079613,
-0.020717306062579155,
0.09166223555803299,
-0.13407427072525024,
0.07591645419597626,
-0.06506796926259995,
0.09540555626153946,
-0.04078149050474167,
0.026116279885172844,
0.08148866146802902,
0.09425761550664902,
0.009559150785207748,
0.03889855742454529,
-0.18740171194076538,
0.21761813759803772,
0.018299525603652,
0.10843632370233536,
-0.04615698754787445,
0.041683379560709,
0.008182809688150883,
0.007691723760217428,
0.09144968539476395,
0.0041596051305532455,
-0.0562497153878212,
-0.1959637850522995,
-0.057943955063819885,
0.0010667823953554034,
0.1232144758105278,
-0.06533419340848923,
0.10037438571453094,
-0.045168641954660416,
-0.0151418661698699,
0.026381704956293106,
0.01629837229847908,
-0.12736232578754425,
-0.10840428620576859,
0.030613210052251816,
0.014258827082812786,
0.010634271427989006,
-0.07034986466169357,
-0.07549890875816345,
-0.08647692948579788,
0.1819992959499359,
-0.014519525691866875,
-0.025673238560557365,
-0.14648011326789856,
0.06556930392980576,
0.1072741150856018,
-0.06690610200166702,
0.019312933087348938,
0.03670237958431244,
0.1020195484161377,
0.02204502746462822,
-0.06451871246099472,
0.08284972608089447,
-0.06599340587854385,
-0.17098885774612427,
-0.06948599964380264,
0.12660710513591766,
0.05428523197770119,
0.041999734938144684,
-0.0011052138870581985,
0.042886883020401,
0.012987119145691395,
-0.08076594024896622,
0.03503702953457832,
0.02831445075571537,
0.047963306307792664,
0.025511452928185463,
-0.005375365726649761,
0.012657961808145046,
-0.06596846133470535,
-0.021259943023324013,
0.08352288603782654,
0.2588767409324646,
-0.07926920801401138,
0.06801657378673553,
0.05206409469246864,
-0.05450133606791496,
-0.14349471032619476,
0.04531487077474594,
0.12119514495134354,
0.011725795455276966,
0.09985949844121933,
-0.15418171882629395,
0.11914362013339996,
0.12275082617998123,
-0.014221311546862125,
0.014827491715550423,
-0.30914971232414246,
-0.15760137140750885,
0.047034233808517456,
0.10539700835943222,
0.027682378888130188,
-0.12502306699752808,
-0.038798458874225616,
-0.04559813067317009,
-0.2062748670578003,
0.07791052013635635,
-0.08338829129934311,
0.1048448458313942,
0.01641703024506569,
0.055478621274232864,
0.023673903197050095,
-0.03494015708565712,
0.14823973178863525,
0.058424901217222214,
0.0850910171866417,
-0.04583413898944855,
0.048043422400951385,
0.07513907551765442,
-0.07232017070055008,
0.022082170471549034,
-0.026906181126832962,
0.04000061750411987,
-0.13365104794502258,
-0.01608642004430294,
-0.0710001140832901,
0.03624671325087547,
-0.04975571110844612,
-0.05193120613694191,
-0.018770629540085793,
0.04348384961485863,
0.0755205750465393,
-0.030216755345463753,
0.04361338168382645,
-0.00847178976982832,
0.08498432487249374,
0.1535896211862564,
0.04304378107190132,
-0.0232686884701252,
-0.1516987830400467,
-0.0013788884971290827,
0.005633092951029539,
0.03817128390073776,
-0.08097556233406067,
0.048181962221860886,
0.13454684615135193,
0.04148101434111595,
0.14555811882019043,
0.024885063990950584,
-0.056135740131139755,
0.008781244046986103,
0.02770131640136242,
-0.11880012601613998,
-0.11439863592386246,
0.022833650931715965,
-0.0661502480506897,
-0.08940201252698898,
-0.01948961615562439,
0.1563778519630432,
-0.011606162413954735,
-0.0019805149640887976,
-0.004449554719030857,
0.01609647087752819,
-0.027679216116666794,
0.2110012173652649,
-0.01570332981646061,
0.08429139852523804,
-0.08842156082391739,
0.11172790080308914,
0.09150317311286926,
-0.09467513859272003,
0.03155463561415672,
0.08032277226448059,
-0.09206299483776093,
-0.010117759928107262,
0.020370353013277054,
0.1265193521976471,
-0.05096959322690964,
-0.01903352700173855,
-0.09753476083278656,
-0.10408344864845276,
0.05886588618159294,
0.10257162898778915,
0.022559767588973045,
-0.0077002085745334625,
-0.035171959549188614,
0.005190616007894278,
-0.0999758318066597,
0.05995501950383186,
0.07306180149316788,
0.047889865934848785,
-0.11252888292074203,
0.16251809895038605,
0.01391500886529684,
0.008329044096171856,
0.0031199546065181494,
0.01372502464801073,
-0.09079273045063019,
0.0004801213217433542,
-0.14961868524551392,
-0.019393227994441986,
0.001450608135201037,
-0.003276197472587228,
-0.015872102230787277,
-0.03220471367239952,
-0.039415143430233,
0.03452686965465546,
-0.08647488802671432,
-0.08165109157562256,
0.009523328393697739,
0.03962737321853638,
-0.16209113597869873,
-0.016481975093483925,
0.02765698730945587,
-0.10955099016427994,
0.07334879040718079,
0.05652479827404022,
0.02149316668510437,
0.021762892603874207,
-0.08229734748601913,
-0.0313427671790123,
0.012178207747638226,
0.02265780232846737,
0.07610795646905899,
-0.10903993993997574,
-0.013692053034901619,
-0.04250537231564522,
0.0398159958422184,
0.019391357898712158,
0.037887703627347946,
-0.11896158009767532,
0.000580513384193182,
-0.035718243569135666,
-0.055370766669511795,
-0.05798213556408882,
0.02526269294321537,
0.11374994367361069,
0.04228140041232109,
0.16672061383724213,
-0.07416819781064987,
0.0436113104224205,
-0.22042714059352875,
-0.03730415180325508,
0.008635335601866245,
-0.0289055947214365,
-0.03837426379323006,
-0.06242875009775162,
0.10130669921636581,
-0.055370181798934937,
0.07390744984149933,
0.009861507453024387,
0.149628683924675,
0.05570117384195328,
-0.06767991185188293,
-0.07945141941308975,
0.018487626686692238,
0.09427867829799652,
0.054873351007699966,
-0.006038461811840534,
0.08741031587123871,
-0.014715148136019707,
0.06104760989546776,
0.08363853394985199,
0.2118854820728302,
0.1708439737558365,
0.023746276274323463,
0.06674568355083466,
0.014272374100983143,
-0.12914574146270752,
-0.19526240229606628,
0.08358285576105118,
-0.09472045302391052,
0.14023186266422272,
-0.06646163761615753,
0.13646696507930756,
0.04775303229689598,
-0.1947341412305832,
0.06654804199934006,
-0.07943463325500488,
-0.08990028500556946,
-0.09397375583648682,
-0.029582876712083817,
-0.06791093945503235,
-0.1078198254108429,
0.028735240921378136,
-0.08517688512802124,
0.08781582117080688,
0.10954412072896957,
0.015737980604171753,
0.01821509748697281,
0.13253484666347504,
-0.06628761440515518,
-0.006360669154673815,
0.05936931073665619,
0.015676524490118027,
-0.018112991005182266,
-0.04172259569168091,
-0.0560583658516407,
0.024511029943823814,
0.017832575365900993,
0.07815954089164734,
-0.031039629131555557,
-0.019978661090135574,
0.02383165992796421,
0.006493923719972372,
-0.08795225620269775,
0.03168032690882683,
0.013736015185713768,
0.026058772578835487,
0.048725325614213943,
0.059449367225170135,
0.036904897540807724,
-0.04240456596016884,
0.29209741950035095,
-0.08530546724796295,
-0.08512192964553833,
-0.13771124184131622,
0.20289772748947144,
0.05958571657538414,
0.017399301752448082,
0.043505217880010605,
-0.11486434191465378,
-0.020765818655490875,
0.1584147959947586,
0.09116818755865097,
-0.08861992508172989,
-0.013533396646380424,
-0.01962587609887123,
-0.010098484344780445,
-0.04193343594670296,
0.09575313329696655,
0.0868869423866272,
0.03483211621642113,
-0.05315162613987923,
0.02709222212433815,
-0.003955142572522163,
-0.06263622641563416,
-0.04581025615334511,
0.08772283047437668,
0.0009562861523590982,
0.030353611335158348,
-0.034532539546489716,
0.0645894706249237,
0.024001093581318855,
-0.26260557770729065,
0.08398915082216263,
-0.1947316974401474,
-0.1795394867658615,
-0.006320446729660034,
0.08195141702890396,
-0.017654260620474815,
0.08549971133470535,
0.009154360741376877,
-0.01853928156197071,
0.14195823669433594,
-0.012671040371060371,
-0.017819665372371674,
-0.1187528669834137,
0.07139096409082413,
-0.14385946094989777,
0.21219748258590698,
-0.013888795860111713,
0.04435410723090172,
0.10785117000341415,
0.03721955046057701,
-0.11164578795433044,
0.05831318348646164,
0.0666063129901886,
-0.11192932724952698,
0.027903234586119652,
0.1659667193889618,
-0.0489577054977417,
0.11663345247507095,
0.04959728196263313,
-0.15639685094356537,
0.007289279252290726,
-0.004553421400487423,
0.003656975692138076,
-0.05653156340122223,
-0.0388365276157856,
-0.04785134643316269,
0.14364588260650635,
0.21742327511310577,
-0.012401456944644451,
0.03617708012461662,
-0.07194942981004715,
0.014807645231485367,
0.022301053628325462,
0.10309932380914688,
-0.05501493811607361,
-0.2041659951210022,
0.0623394139111042,
0.0205573458224535,
0.02782152220606804,
-0.20210760831832886,
-0.12263984978199005,
0.05891580134630203,
-0.060964975506067276,
-0.026459386572241783,
0.12214569002389908,
0.051049865782260895,
0.02573011815547943,
-0.03602307289838791,
-0.14126428961753845,
-0.022603988647460938,
0.1666017472743988,
-0.1317993700504303,
-0.05802304297685623
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-timit-tokenizer-base
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 3.0828
- Wer: 1.0
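A WER of 1.0, as reported here (and at every checkpoint in the results table below), means no reference word was recovered; with CTC models this usually indicates the network collapsed to emitting only blank tokens. As a minimal sketch (the example strings are assumed, not from this card), the metric can be reproduced with the `jiwer` package:

```python
# Hedged sketch: word error rate as reported above, computed with jiwer.
# WER = (substitutions + deletions + insertions) / number of reference words.
import jiwer

reference = "she had your dark suit in greasy wash water"   # 9 words
hypothesis = "she had dark suit in greasy wash"             # "your", "water" dropped

print(jiwer.wer(reference, hypothesis))  # 2 deletions / 9 words ≈ 0.22

# An empty hypothesis (all-blank CTC output) deletes every reference word,
# which is what a WER of exactly 1.0 corresponds to.
```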
## Model description
More information needed
## Intended uses & limitations
More information needed
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
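As a hedged illustration (this is not the original training script; only the values listed above are taken from the card, and the output path is assumed), these settings map onto `transformers.TrainingArguments` roughly as follows:

```python
# Minimal sketch of the hyperparameters above expressed as TrainingArguments.
# Adam betas=(0.9, 0.999) and epsilon=1e-08 are the Trainer defaults.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-xls-r-timit-tokenizer-base",  # assumed output path
    learning_rate=3e-4,
    per_device_train_batch_size=16,
    per_device_eval_batch_size=8,
    gradient_accumulation_steps=2,  # 16 * 2 = total train batch size of 32
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=500,
    num_train_epochs=30,
    fp16=True,  # "Native AMP" mixed-precision training
)
```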
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:---:|
| 3.3134 | 4.03 | 500 | 3.0814 | 1.0 |
| 2.9668 | 8.06 | 1000 | 3.0437 | 1.0 |
| 2.9604 | 12.1 | 1500 | 3.0337 | 1.0 |
| 2.9619 | 16.13 | 2000 | 3.0487 | 1.0 |
| 2.9588 | 20.16 | 2500 | 3.0859 | 1.0 |
| 2.957 | 24.19 | 3000 | 3.0921 | 1.0 |
| 2.9555 | 28.22 | 3500 | 3.0828 | 1.0 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-timit-tokenizer-base", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-timit-tokenizer-base
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-xls-r-timit-tokenizer-base
===================================
This model is a fine-tuned version of facebook/wav2vec2-base on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 3.0828
* Wer: 1.0
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0003
* train\_batch\_size: 16
* eval\_batch\_size: 8
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 32
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 500
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
158,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.13006892800331116,
0.07729004323482513,
-0.001969793578609824,
0.05954808369278908,
0.12145804613828659,
0.0024069463834166527,
0.12439573556184769,
0.1315283477306366,
-0.10195445269346237,
0.0698397308588028,
0.1194976270198822,
0.11569254845380783,
0.044535525143146515,
0.1060384213924408,
-0.031297050416469574,
-0.3123490512371063,
0.012429763562977314,
0.030680187046527863,
-0.15023821592330933,
0.12603455781936646,
0.10409147292375565,
-0.11244010180234909,
0.05032738298177719,
0.055761564522981644,
-0.15202268958091736,
0.003142560599371791,
-0.017468439415097237,
-0.0860467180609703,
0.11887132376432419,
0.03468502312898636,
0.09653358906507492,
0.02772720530629158,
0.08311745524406433,
-0.21305975317955017,
0.009685827419161797,
0.05178215727210045,
0.03413263335824013,
0.08119498193264008,
0.07985289394855499,
-0.012528839521110058,
0.15310916304588318,
-0.06002139672636986,
0.08129581809043884,
0.057284269481897354,
-0.1089617908000946,
-0.32914063334465027,
-0.08828354626893997,
0.07321296632289886,
0.10145972669124603,
0.08618052303791046,
-0.01725301705300808,
0.10997362434864044,
-0.0516035296022892,
0.0906982347369194,
0.24354833364486694,
-0.2848224937915802,
-0.08458969742059708,
-0.03160274028778076,
0.057795729488134384,
0.02898668497800827,
-0.11262668669223785,
-0.010413186624646187,
0.03708086162805557,
0.03576362133026123,
0.11322009563446045,
0.00941045768558979,
-0.00407549599185586,
0.017765244469046593,
-0.14881500601768494,
-0.06368044018745422,
0.14077574014663696,
0.0750119760632515,
-0.05044467747211456,
-0.09949962049722672,
-0.029119648039340973,
-0.21286410093307495,
-0.047425493597984314,
-0.006743223872035742,
0.03075454570353031,
-0.05704685300588608,
-0.1413401961326599,
-0.005784714128822088,
-0.08562935143709183,
-0.10388270765542984,
0.004761265590786934,
0.21778398752212524,
0.04568227007985115,
-0.0022157097700983286,
-0.01907745935022831,
0.11413607746362686,
0.05374573543667793,
-0.15427115559577942,
-0.028935501351952553,
0.039934441447257996,
-0.07196107506752014,
-0.02515016868710518,
-0.053999677300453186,
-0.02284380793571472,
-0.005758027546107769,
0.16873782873153687,
-0.03618548810482025,
0.06949369609355927,
0.03545861318707466,
0.027802936732769012,
-0.10922600328922272,
0.21750614047050476,
-0.05474388971924782,
-0.008941118605434895,
-0.03811006620526314,
0.0950222983956337,
0.011027658358216286,
-0.015318757854402065,
-0.07891546934843063,
0.028847146779298782,
0.10361789911985397,
0.03904692456126213,
-0.03350231796503067,
0.038563478738069534,
-0.04792650789022446,
-0.02709932252764702,
0.019829297438263893,
-0.0867348238825798,
0.02327934093773365,
0.019067414104938507,
-0.09780359268188477,
-0.00866840872913599,
0.012430645525455475,
0.02557417005300522,
0.005003004334867001,
0.10845928639173508,
-0.08226852864027023,
-0.0011705171782523394,
-0.08096349239349365,
-0.09791962057352066,
0.02524363435804844,
-0.038861069828271866,
0.009102833457291126,
-0.08608370274305344,
-0.12447575479745865,
-0.01767042465507984,
0.05085970461368561,
-0.03483721613883972,
-0.06541859358549118,
-0.04724497348070145,
-0.08127444237470627,
0.04989399388432503,
-0.020674744620919228,
0.14626391232013702,
-0.05589993670582771,
0.10849699378013611,
0.07969580590724945,
0.06421709060668945,
0.028113503009080887,
0.050791963934898376,
-0.060468193143606186,
0.036927543580532074,
-0.16577620804309845,
0.07187598198652267,
-0.08335911482572556,
0.07352966070175171,
-0.1299343854188919,
-0.12917135655879974,
0.005488288588821888,
-0.0014029338490217924,
0.08767905086278915,
0.09761403501033783,
-0.14841538667678833,
-0.11192837357521057,
0.156582772731781,
-0.077564537525177,
-0.1388697773218155,
0.12246164679527283,
-0.01649344153702259,
0.0027931334916502237,
0.04731963574886322,
0.1429062932729721,
0.08958175778388977,
-0.09852148592472076,
-0.005171321798115969,
-0.04527593031525612,
0.10073765367269516,
-0.007025726139545441,
0.11102380603551865,
-0.032373178750276566,
0.01874067820608616,
0.009952468797564507,
-0.05407185107469559,
0.05067146196961403,
-0.10760031640529633,
-0.09761928766965866,
-0.037531349807977676,
-0.09769608825445175,
0.03221775218844414,
0.05661816895008087,
0.07056296616792679,
-0.10169161111116409,
-0.13753995299339294,
0.043793387711048126,
0.11366024613380432,
-0.0903615728020668,
0.031162740662693977,
-0.10397566109895706,
0.05489426851272583,
-0.03868880122900009,
-0.008346573449671268,
-0.17487038671970367,
-0.01716676913201809,
0.018851036205887794,
-0.05868754908442497,
0.024716755375266075,
-0.030509769916534424,
0.09325331449508667,
0.05901740491390228,
-0.048247773200273514,
-0.06568437069654465,
-0.08259612321853638,
-0.01309414766728878,
-0.0788160115480423,
-0.20542213320732117,
-0.1019454151391983,
-0.021109407767653465,
0.1571868509054184,
-0.19981326162815094,
0.0278315432369709,
0.03388996049761772,
0.12336376309394836,
0.0348738431930542,
-0.04587772116065025,
-0.020962968468666077,
0.0719718411564827,
-0.028094671666622162,
-0.0665077269077301,
0.034262143075466156,
0.006360148079693317,
-0.12926851212978363,
-0.00900106318295002,
-0.10888885706663132,
0.1427089273929596,
0.11986333131790161,
-0.020534485578536987,
-0.07236147671937943,
-0.01766807585954666,
-0.07714740186929703,
-0.04611492529511452,
-0.007132268976420164,
0.003938495181500912,
0.17299498617649078,
0.025814926251769066,
0.13498516380786896,
-0.080159991979599,
-0.064271479845047,
0.0395641028881073,
0.00015374747454188764,
-0.01701745204627514,
0.11950244754552841,
0.04435776174068451,
-0.0729757621884346,
0.10408109426498413,
0.09646826982498169,
-0.09143858402967453,
0.14375808835029602,
-0.07190344482660294,
-0.09719441086053848,
-0.02902473509311676,
0.009818628430366516,
0.04537349194288254,
0.11616425216197968,
-0.14377069473266602,
-0.0219808891415596,
0.024218810722231865,
0.005397321656346321,
0.018940394744277,
-0.21441590785980225,
-0.009544947184622288,
0.05394911766052246,
-0.0607762411236763,
-0.04768545180559158,
0.0002486353041604161,
-0.012664380483329296,
0.08052386343479156,
0.017382271587848663,
-0.059930093586444855,
0.004945396911352873,
0.0006753334891982377,
-0.06805195659399033,
0.19801685214042664,
-0.07493733614683151,
-0.13525977730751038,
-0.16101619601249695,
-0.03481104224920273,
-0.056479133665561676,
-0.0014351303689181805,
0.051342807710170746,
-0.10740278661251068,
-0.029631169512867928,
-0.05255455896258354,
0.0483420193195343,
-0.04776541516184807,
0.04660490155220032,
0.04140453413128853,
0.0055513083934783936,
0.09253919869661331,
-0.12077653408050537,
0.0188759732991457,
-0.03150045499205589,
-0.04352874308824539,
0.017816219478845596,
0.03975585103034973,
0.1140240803360939,
0.1578490287065506,
0.018570199608802795,
0.038788750767707825,
-0.02843562513589859,
0.19173108041286469,
-0.10217786580324173,
-0.051049716770648956,
0.1313893347978592,
0.008626271970570087,
0.0408991239964962,
0.0815669521689415,
0.06906970590353012,
-0.09063766151666641,
0.016533443704247475,
0.03945718705654144,
-0.02800559811294079,
-0.2189369648694992,
-0.017876651138067245,
-0.052900198847055435,
-0.017701489850878716,
0.12100754678249359,
0.03348584100604057,
0.05050405487418175,
0.045076675713062286,
-0.00931475218385458,
0.018663670867681503,
-0.013468533754348755,
0.08701901882886887,
0.09059009701013565,
0.06054220721125603,
0.13160979747772217,
-0.03602054342627525,
-0.0520872138440609,
0.02049659937620163,
-0.009871664457023144,
0.2274779975414276,
0.009152872487902641,
0.18347378075122833,
0.054517172276973724,
0.1540527045726776,
0.014814993366599083,
0.08557996898889542,
0.015784382820129395,
-0.042316555976867676,
0.02227150835096836,
-0.05825990065932274,
-0.031283147633075714,
0.04411304369568825,
0.055663689970970154,
0.0626799538731575,
-0.13481371104717255,
-0.020825672894716263,
0.02359205111861229,
0.3599518835544586,
0.05636140704154968,
-0.34419891238212585,
-0.12236525118350983,
0.0011942728888243437,
-0.07470420002937317,
-0.03288089856505394,
0.02043106034398079,
0.08510317653417587,
-0.08839494735002518,
0.0736437663435936,
-0.07903195917606354,
0.09752631932497025,
-0.043523672968149185,
0.007935469038784504,
0.08368922024965286,
0.0824279859662056,
-0.0017159385606646538,
0.052998293191194534,
-0.24598541855812073,
0.2891269624233246,
-0.009256646037101746,
0.09736278653144836,
-0.0467572882771492,
0.028340861201286316,
0.036458682268857956,
0.0011855855118483305,
0.052690379321575165,
-0.027701226994395256,
-0.07308001816272736,
-0.1986115574836731,
-0.07089867442846298,
0.02266407571732998,
0.12466077506542206,
-0.07461219280958176,
0.13266868889331818,
-0.02375919558107853,
-0.01849285140633583,
0.06317523866891861,
-0.060618750751018524,
-0.08706145733594894,
-0.09816139936447144,
0.018336007371544838,
0.025774160400032997,
0.06464212387800217,
-0.10864750295877457,
-0.1198664978146553,
-0.05953822284936905,
0.15151503682136536,
-0.07578399032354355,
-0.022709708660840988,
-0.13160745799541473,
0.06802573055028915,
0.15663805603981018,
-0.06687887758016586,
0.05633344501256943,
0.012358131818473339,
0.13568297028541565,
0.025627681985497475,
-0.03280585631728172,
0.09622389078140259,
-0.08170334994792938,
-0.21222342550754547,
-0.03201885521411896,
0.14787186682224274,
0.024425681680440903,
0.057366810739040375,
-0.023358182981610298,
0.03453398123383522,
-0.03278414160013199,
-0.08652547746896744,
0.05820857360959053,
-0.021961629390716553,
0.023969797417521477,
0.015295246616005898,
0.002069956623017788,
0.03897500038146973,
-0.07355381548404694,
-0.03747929632663727,
0.14050959050655365,
0.2905829846858978,
-0.08323709666728973,
-0.006829030811786652,
0.04071608930826187,
-0.01999621093273163,
-0.13056662678718567,
0.018167484551668167,
0.11816268414258957,
0.021023742854595184,
-0.00902947410941124,
-0.20892596244812012,
0.05300996080040932,
0.0794893354177475,
-0.03198099881410599,
0.10044839233160019,
-0.31032055616378784,
-0.1465235948562622,
0.12475177645683289,
0.11227548122406006,
0.0014272555708885193,
-0.1588851511478424,
-0.06375232338905334,
-0.019001184031367302,
-0.12818369269371033,
0.0896773487329483,
-0.03821129351854324,
0.1216282919049263,
-0.017941927537322044,
0.06762666255235672,
0.012437881901860237,
-0.053201451897621155,
0.1543060541152954,
-0.011704503558576107,
0.06345760822296143,
-0.004202710464596748,
0.039142508059740067,
0.0484519861638546,
-0.06040389463305473,
0.014936879277229309,
-0.08930713683366776,
0.02401319146156311,
-0.11815331131219864,
-0.03860253840684891,
-0.0914759561419487,
0.04048921912908554,
-0.03252727910876274,
-0.03744930773973465,
-0.020873529836535454,
0.02098826877772808,
0.021582981571555138,
-0.010376826860010624,
0.17051957547664642,
-0.021377872675657272,
0.16764992475509644,
0.11073220521211624,
0.09313507378101349,
-0.021396372467279434,
-0.10201609134674072,
-0.0093442527577281,
-0.018264297395944595,
0.07338321954011917,
-0.1382240504026413,
0.015589314512908459,
0.12989415228366852,
0.0596783384680748,
0.1300256997346878,
0.07426523417234421,
-0.06474962830543518,
0.027448566630482674,
0.07590167224407196,
-0.09445245563983917,
-0.12419097125530243,
-0.03002060204744339,
0.025104263797402382,
-0.13946689665317535,
0.06435635685920715,
0.09844792634248734,
-0.06154536083340645,
-0.009460304863750935,
0.007055831607431173,
-0.005226208362728357,
-0.057791534811258316,
0.2204383760690689,
0.04570847004652023,
0.08748295158147812,
-0.10602555423974991,
0.07307598739862442,
0.03475995734333992,
-0.13782329857349396,
0.013953858986496925,
0.06885261088609695,
-0.04605920612812042,
-0.011976619251072407,
0.009344340302050114,
0.0893707126379013,
-0.05268368870019913,
-0.0620669387280941,
-0.15069016814231873,
-0.13991618156433105,
0.08844584226608276,
0.1333930641412735,
0.05546136572957039,
0.03037344664335251,
-0.054962530732154846,
0.059285968542099,
-0.11770603060722351,
0.08495376259088516,
0.07358075678348541,
0.08168965578079224,
-0.15779179334640503,
0.1573401391506195,
0.011959518305957317,
0.02601158805191517,
0.001082317321561277,
-0.008420114405453205,
-0.09338659048080444,
0.024775173515081406,
-0.1342976689338684,
-0.04762788861989975,
-0.05142791569232941,
0.0026310926768928766,
0.008298384957015514,
-0.06451267004013062,
-0.08075074851512909,
0.02992560714483261,
-0.12527404725551605,
-0.046584248542785645,
0.014511500485241413,
0.04546618461608887,
-0.1249312236905098,
-0.010156498290598392,
0.049010079354047775,
-0.12385927140712738,
0.08366966992616653,
0.07194304466247559,
0.023717001080513,
0.05169839784502983,
-0.052177511155605316,
0.010846767574548721,
0.05162842944264412,
-0.010148127563297749,
0.03828534856438637,
-0.13541948795318604,
-0.006875729653984308,
-0.02215857431292534,
0.05571886524558067,
-0.0003027305647265166,
0.05133764073252678,
-0.12957073748111725,
-0.04286310821771622,
-0.016629831865429878,
-0.054744653403759,
-0.06443526595830917,
0.04517325758934021,
0.08894677460193634,
0.03705950081348419,
0.18152235448360443,
-0.07311119139194489,
0.020337140187621117,
-0.22058312594890594,
0.01070133876055479,
-0.026168690994381905,
-0.09796643257141113,
-0.07796621322631836,
-0.028882959857583046,
0.07419703155755997,
-0.0670676901936531,
0.09112503379583359,
-0.06768546998500824,
0.0636286735534668,
0.0439075231552124,
-0.05761638656258583,
0.026958636939525604,
0.04574819281697273,
0.2454788237810135,
0.058024000376462936,
-0.013910389505326748,
0.08180250227451324,
0.02215707115828991,
0.07136616110801697,
0.10840915888547897,
0.16571709513664246,
0.14713484048843384,
-0.008035112172365189,
0.11230288445949554,
0.06682708859443665,
-0.08181402832269669,
-0.16843606531620026,
0.07240087538957596,
-0.03776944428682327,
0.12818492949008942,
-0.002942534862086177,
0.20731917023658752,
0.12225812673568726,
-0.17639707028865814,
0.041360996663570404,
-0.027626007795333862,
-0.07751331478357315,
-0.09853797405958176,
-0.03407168388366699,
-0.06952609866857529,
-0.1879522055387497,
0.02386309951543808,
-0.10019493848085403,
0.04569961130619049,
0.0499090813100338,
0.026428621262311935,
0.012381397187709808,
0.1540805995464325,
0.04204383119940758,
0.016138488426804543,
0.09166231006383896,
0.0022799011785537004,
-0.03544513136148453,
-0.05338474363088608,
-0.0982859805226326,
0.03087848611176014,
-0.03441892936825752,
0.05066896602511406,
-0.059521015733480453,
-0.1259525865316391,
0.06421322375535965,
0.016159676015377045,
-0.11461999267339706,
0.024982992559671402,
0.005470600910484791,
0.08190874010324478,
0.035729724913835526,
0.017254065722227097,
-0.0004721263831015676,
-0.019553517922759056,
0.2379143238067627,
-0.11203157901763916,
-0.06405151635408401,
-0.1310209482908249,
0.25977739691734314,
0.01467808336019516,
-0.01971849799156189,
0.033267561346292496,
-0.07295237481594086,
-0.028249133378267288,
0.16956594586372375,
0.13393591344356537,
-0.010709038935601711,
-0.023080574348568916,
0.00258997012861073,
-0.01699604094028473,
-0.05568348616361618,
0.07656671851873398,
0.11821909248828888,
0.07214906066656113,
-0.06670068204402924,
-0.03915267810225487,
-0.037576667964458466,
-0.051187459379434586,
-0.008910520933568478,
0.09710872173309326,
0.020655706524848938,
-0.020823977887630463,
-0.0352361686527729,
0.09211451560258865,
-0.06177157908678055,
-0.10031464695930481,
0.06639812141656876,
-0.17139063775539398,
-0.17836950719356537,
-0.029927833005785942,
0.06768326461315155,
0.010723229497671127,
0.07085436582565308,
0.005936807487159967,
-0.033798664808273315,
0.08245527744293213,
0.0011328092077746987,
-0.06616675108671188,
-0.12274333089590073,
0.11334734410047531,
-0.07690369337797165,
0.19100430607795715,
-0.05888223275542259,
0.041437018662691116,
0.12965980172157288,
0.06472662836313248,
-0.08167003095149994,
0.035294052213430405,
0.06294091045856476,
-0.1363840103149414,
0.03777487203478813,
0.1757582426071167,
-0.031191831454634666,
0.11297404766082764,
0.023374641314148903,
-0.1424483358860016,
0.012658156454563141,
-0.08557455986738205,
-0.03353589400649071,
-0.06192026287317276,
-0.031164322048425674,
-0.04112519696354866,
0.12365196645259857,
0.21207210421562195,
-0.06261181086301804,
-0.0108278077095747,
-0.05374167114496231,
0.03492928668856621,
0.07297107577323914,
0.09020373970270157,
-0.046948060393333435,
-0.29203367233276367,
0.00806252658367157,
0.018276071175932884,
-0.01562916673719883,
-0.28167691826820374,
-0.09942390769720078,
0.04111960530281067,
-0.06293390691280365,
-0.037009112536907196,
0.08899466693401337,
0.09251882135868073,
0.049073852598667145,
-0.05244305729866028,
-0.060060326009988785,
-0.06361690908670425,
0.1796216070652008,
-0.18383800983428955,
-0.0629945918917656
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-xls-r-timit-tokenizer
This model is a fine-tuned version of [facebook/wav2vec2-xls-r-300m](https://huggingface.co/facebook/wav2vec2-xls-r-300m) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4285
- Wer: 0.3662
## Model description
More information needed
## Intended uses & limitations
More information needed
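Although the card leaves this section open, a typical use is CTC transcription of 16 kHz mono speech. A hedged usage sketch (assuming the repository includes a saved processor, which `Trainer` fine-tuning normally produces):

```python
# Minimal inference sketch for this checkpoint; not from the original card.
import torch
from transformers import Wav2Vec2Processor, Wav2Vec2ForCTC

model_id = "hrdipto/wav2vec2-xls-r-timit-tokenizer"
processor = Wav2Vec2Processor.from_pretrained(model_id)
model = Wav2Vec2ForCTC.from_pretrained(model_id)

def transcribe(speech):
    # `speech` is a 1-D float array of 16 kHz samples (e.g. read via soundfile).
    inputs = processor(speech, sampling_rate=16000, return_tensors="pt")
    with torch.no_grad():
        logits = model(inputs.input_values).logits
    pred_ids = torch.argmax(logits, dim=-1)
    return processor.batch_decode(pred_ids)[0]
```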
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training:
- learning_rate: 0.0003
- train_batch_size: 16
- eval_batch_size: 8
- seed: 42
- gradient_accumulation_steps: 2
- total_train_batch_size: 32
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 500
- num_epochs: 30
- mixed_precision_training: Native AMP
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 2.1571 | 4.03 | 500 | 0.5235 | 0.5098 |
| 0.2001 | 8.06 | 1000 | 0.4172 | 0.4375 |
| 0.0968 | 12.1 | 1500 | 0.4562 | 0.4016 |
| 0.0607 | 16.13 | 2000 | 0.4640 | 0.4050 |
| 0.0409 | 20.16 | 2500 | 0.4688 | 0.3914 |
| 0.0273 | 24.19 | 3000 | 0.4414 | 0.3763 |
| 0.0181 | 28.22 | 3500 | 0.4285 | 0.3662 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-xls-r-timit-tokenizer", "results": []}]}
|
automatic-speech-recognition
|
hrdipto/wav2vec2-xls-r-timit-tokenizer
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-xls-r-timit-tokenizer
==============================
This model is a fine-tuned version of facebook/wav2vec2-xls-r-300m on the None dataset.
It achieves the following results on the evaluation set:
* Loss: 0.4285
* Wer: 0.3662
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0003
* train\_batch\_size: 16
* eval\_batch\_size: 8
* seed: 42
* gradient\_accumulation\_steps: 2
* total\_train\_batch\_size: 32
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 500
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
158,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0003\n* train\\_batch\\_size: 16\n* eval\\_batch\\_size: 8\n* seed: 42\n* gradient\\_accumulation\\_steps: 2\n* total\\_train\\_batch\\_size: 32\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 500\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.13006892800331116,
0.07729004323482513,
-0.001969793578609824,
0.05954808369278908,
0.12145804613828659,
0.0024069463834166527,
0.12439573556184769,
0.1315283477306366,
-0.10195445269346237,
0.0698397308588028,
0.1194976270198822,
0.11569254845380783,
0.044535525143146515,
0.1060384213924408,
-0.031297050416469574,
-0.3123490512371063,
0.012429763562977314,
0.030680187046527863,
-0.15023821592330933,
0.12603455781936646,
0.10409147292375565,
-0.11244010180234909,
0.05032738298177719,
0.055761564522981644,
-0.15202268958091736,
0.003142560599371791,
-0.017468439415097237,
-0.0860467180609703,
0.11887132376432419,
0.03468502312898636,
0.09653358906507492,
0.02772720530629158,
0.08311745524406433,
-0.21305975317955017,
0.009685827419161797,
0.05178215727210045,
0.03413263335824013,
0.08119498193264008,
0.07985289394855499,
-0.012528839521110058,
0.15310916304588318,
-0.06002139672636986,
0.08129581809043884,
0.057284269481897354,
-0.1089617908000946,
-0.32914063334465027,
-0.08828354626893997,
0.07321296632289886,
0.10145972669124603,
0.08618052303791046,
-0.01725301705300808,
0.10997362434864044,
-0.0516035296022892,
0.0906982347369194,
0.24354833364486694,
-0.2848224937915802,
-0.08458969742059708,
-0.03160274028778076,
0.057795729488134384,
0.02898668497800827,
-0.11262668669223785,
-0.010413186624646187,
0.03708086162805557,
0.03576362133026123,
0.11322009563446045,
0.00941045768558979,
-0.00407549599185586,
0.017765244469046593,
-0.14881500601768494,
-0.06368044018745422,
0.14077574014663696,
0.0750119760632515,
-0.05044467747211456,
-0.09949962049722672,
-0.029119648039340973,
-0.21286410093307495,
-0.047425493597984314,
-0.006743223872035742,
0.03075454570353031,
-0.05704685300588608,
-0.1413401961326599,
-0.005784714128822088,
-0.08562935143709183,
-0.10388270765542984,
0.004761265590786934,
0.21778398752212524,
0.04568227007985115,
-0.0022157097700983286,
-0.01907745935022831,
0.11413607746362686,
0.05374573543667793,
-0.15427115559577942,
-0.028935501351952553,
0.039934441447257996,
-0.07196107506752014,
-0.02515016868710518,
-0.053999677300453186,
-0.02284380793571472,
-0.005758027546107769,
0.16873782873153687,
-0.03618548810482025,
0.06949369609355927,
0.03545861318707466,
0.027802936732769012,
-0.10922600328922272,
0.21750614047050476,
-0.05474388971924782,
-0.008941118605434895,
-0.03811006620526314,
0.0950222983956337,
0.011027658358216286,
-0.015318757854402065,
-0.07891546934843063,
0.028847146779298782,
0.10361789911985397,
0.03904692456126213,
-0.03350231796503067,
0.038563478738069534,
-0.04792650789022446,
-0.02709932252764702,
0.019829297438263893,
-0.0867348238825798,
0.02327934093773365,
0.019067414104938507,
-0.09780359268188477,
-0.00866840872913599,
0.012430645525455475,
0.02557417005300522,
0.005003004334867001,
0.10845928639173508,
-0.08226852864027023,
-0.0011705171782523394,
-0.08096349239349365,
-0.09791962057352066,
0.02524363435804844,
-0.038861069828271866,
0.009102833457291126,
-0.08608370274305344,
-0.12447575479745865,
-0.01767042465507984,
0.05085970461368561,
-0.03483721613883972,
-0.06541859358549118,
-0.04724497348070145,
-0.08127444237470627,
0.04989399388432503,
-0.020674744620919228,
0.14626391232013702,
-0.05589993670582771,
0.10849699378013611,
0.07969580590724945,
0.06421709060668945,
0.028113503009080887,
0.050791963934898376,
-0.060468193143606186,
0.036927543580532074,
-0.16577620804309845,
0.07187598198652267,
-0.08335911482572556,
0.07352966070175171,
-0.1299343854188919,
-0.12917135655879974,
0.005488288588821888,
-0.0014029338490217924,
0.08767905086278915,
0.09761403501033783,
-0.14841538667678833,
-0.11192837357521057,
0.156582772731781,
-0.077564537525177,
-0.1388697773218155,
0.12246164679527283,
-0.01649344153702259,
0.0027931334916502237,
0.04731963574886322,
0.1429062932729721,
0.08958175778388977,
-0.09852148592472076,
-0.005171321798115969,
-0.04527593031525612,
0.10073765367269516,
-0.007025726139545441,
0.11102380603551865,
-0.032373178750276566,
0.01874067820608616,
0.009952468797564507,
-0.05407185107469559,
0.05067146196961403,
-0.10760031640529633,
-0.09761928766965866,
-0.037531349807977676,
-0.09769608825445175,
0.03221775218844414,
0.05661816895008087,
0.07056296616792679,
-0.10169161111116409,
-0.13753995299339294,
0.043793387711048126,
0.11366024613380432,
-0.0903615728020668,
0.031162740662693977,
-0.10397566109895706,
0.05489426851272583,
-0.03868880122900009,
-0.008346573449671268,
-0.17487038671970367,
-0.01716676913201809,
0.018851036205887794,
-0.05868754908442497,
0.024716755375266075,
-0.030509769916534424,
0.09325331449508667,
0.05901740491390228,
-0.048247773200273514,
-0.06568437069654465,
-0.08259612321853638,
-0.01309414766728878,
-0.0788160115480423,
-0.20542213320732117,
-0.1019454151391983,
-0.021109407767653465,
0.1571868509054184,
-0.19981326162815094,
0.0278315432369709,
0.03388996049761772,
0.12336376309394836,
0.0348738431930542,
-0.04587772116065025,
-0.020962968468666077,
0.0719718411564827,
-0.028094671666622162,
-0.0665077269077301,
0.034262143075466156,
0.006360148079693317,
-0.12926851212978363,
-0.00900106318295002,
-0.10888885706663132,
0.1427089273929596,
0.11986333131790161,
-0.020534485578536987,
-0.07236147671937943,
-0.01766807585954666,
-0.07714740186929703,
-0.04611492529511452,
-0.007132268976420164,
0.003938495181500912,
0.17299498617649078,
0.025814926251769066,
0.13498516380786896,
-0.080159991979599,
-0.064271479845047,
0.0395641028881073,
0.00015374747454188764,
-0.01701745204627514,
0.11950244754552841,
0.04435776174068451,
-0.0729757621884346,
0.10408109426498413,
0.09646826982498169,
-0.09143858402967453,
0.14375808835029602,
-0.07190344482660294,
-0.09719441086053848,
-0.02902473509311676,
0.009818628430366516,
0.04537349194288254,
0.11616425216197968,
-0.14377069473266602,
-0.0219808891415596,
0.024218810722231865,
0.005397321656346321,
0.018940394744277,
-0.21441590785980225,
-0.009544947184622288,
0.05394911766052246,
-0.0607762411236763,
-0.04768545180559158,
0.0002486353041604161,
-0.012664380483329296,
0.08052386343479156,
0.017382271587848663,
-0.059930093586444855,
0.004945396911352873,
0.0006753334891982377,
-0.06805195659399033,
0.19801685214042664,
-0.07493733614683151,
-0.13525977730751038,
-0.16101619601249695,
-0.03481104224920273,
-0.056479133665561676,
-0.0014351303689181805,
0.051342807710170746,
-0.10740278661251068,
-0.029631169512867928,
-0.05255455896258354,
0.0483420193195343,
-0.04776541516184807,
0.04660490155220032,
0.04140453413128853,
0.0055513083934783936,
0.09253919869661331,
-0.12077653408050537,
0.0188759732991457,
-0.03150045499205589,
-0.04352874308824539,
0.017816219478845596,
0.03975585103034973,
0.1140240803360939,
0.1578490287065506,
0.018570199608802795,
0.038788750767707825,
-0.02843562513589859,
0.19173108041286469,
-0.10217786580324173,
-0.051049716770648956,
0.1313893347978592,
0.008626271970570087,
0.0408991239964962,
0.0815669521689415,
0.06906970590353012,
-0.09063766151666641,
0.016533443704247475,
0.03945718705654144,
-0.02800559811294079,
-0.2189369648694992,
-0.017876651138067245,
-0.052900198847055435,
-0.017701489850878716,
0.12100754678249359,
0.03348584100604057,
0.05050405487418175,
0.045076675713062286,
-0.00931475218385458,
0.018663670867681503,
-0.013468533754348755,
0.08701901882886887,
0.09059009701013565,
0.06054220721125603,
0.13160979747772217,
-0.03602054342627525,
-0.0520872138440609,
0.02049659937620163,
-0.009871664457023144,
0.2274779975414276,
0.009152872487902641,
0.18347378075122833,
0.054517172276973724,
0.1540527045726776,
0.014814993366599083,
0.08557996898889542,
0.015784382820129395,
-0.042316555976867676,
0.02227150835096836,
-0.05825990065932274,
-0.031283147633075714,
0.04411304369568825,
0.055663689970970154,
0.0626799538731575,
-0.13481371104717255,
-0.020825672894716263,
0.02359205111861229,
0.3599518835544586,
0.05636140704154968,
-0.34419891238212585,
-0.12236525118350983,
0.0011942728888243437,
-0.07470420002937317,
-0.03288089856505394,
0.02043106034398079,
0.08510317653417587,
-0.08839494735002518,
0.0736437663435936,
-0.07903195917606354,
0.09752631932497025,
-0.043523672968149185,
0.007935469038784504,
0.08368922024965286,
0.0824279859662056,
-0.0017159385606646538,
0.052998293191194534,
-0.24598541855812073,
0.2891269624233246,
-0.009256646037101746,
0.09736278653144836,
-0.0467572882771492,
0.028340861201286316,
0.036458682268857956,
0.0011855855118483305,
0.052690379321575165,
-0.027701226994395256,
-0.07308001816272736,
-0.1986115574836731,
-0.07089867442846298,
0.02266407571732998,
0.12466077506542206,
-0.07461219280958176,
0.13266868889331818,
-0.02375919558107853,
-0.01849285140633583,
0.06317523866891861,
-0.060618750751018524,
-0.08706145733594894,
-0.09816139936447144,
0.018336007371544838,
0.025774160400032997,
0.06464212387800217,
-0.10864750295877457,
-0.1198664978146553,
-0.05953822284936905,
0.15151503682136536,
-0.07578399032354355,
-0.022709708660840988,
-0.13160745799541473,
0.06802573055028915,
0.15663805603981018,
-0.06687887758016586,
0.05633344501256943,
0.012358131818473339,
0.13568297028541565,
0.025627681985497475,
-0.03280585631728172,
0.09622389078140259,
-0.08170334994792938,
-0.21222342550754547,
-0.03201885521411896,
0.14787186682224274,
0.024425681680440903,
0.057366810739040375,
-0.023358182981610298,
0.03453398123383522,
-0.03278414160013199,
-0.08652547746896744,
0.05820857360959053,
-0.021961629390716553,
0.023969797417521477,
0.015295246616005898,
0.002069956623017788,
0.03897500038146973,
-0.07355381548404694,
-0.03747929632663727,
0.14050959050655365,
0.2905829846858978,
-0.08323709666728973,
-0.006829030811786652,
0.04071608930826187,
-0.01999621093273163,
-0.13056662678718567,
0.018167484551668167,
0.11816268414258957,
0.021023742854595184,
-0.00902947410941124,
-0.20892596244812012,
0.05300996080040932,
0.0794893354177475,
-0.03198099881410599,
0.10044839233160019,
-0.31032055616378784,
-0.1465235948562622,
0.12475177645683289,
0.11227548122406006,
0.0014272555708885193,
-0.1588851511478424,
-0.06375232338905334,
-0.019001184031367302,
-0.12818369269371033,
0.0896773487329483,
-0.03821129351854324,
0.1216282919049263,
-0.017941927537322044,
0.06762666255235672,
0.012437881901860237,
-0.053201451897621155,
0.1543060541152954,
-0.011704503558576107,
0.06345760822296143,
-0.004202710464596748,
0.039142508059740067,
0.0484519861638546,
-0.06040389463305473,
0.014936879277229309,
-0.08930713683366776,
0.02401319146156311,
-0.11815331131219864,
-0.03860253840684891,
-0.0914759561419487,
0.04048921912908554,
-0.03252727910876274,
-0.03744930773973465,
-0.020873529836535454,
0.02098826877772808,
0.021582981571555138,
-0.010376826860010624,
0.17051957547664642,
-0.021377872675657272,
0.16764992475509644,
0.11073220521211624,
0.09313507378101349,
-0.021396372467279434,
-0.10201609134674072,
-0.0093442527577281,
-0.018264297395944595,
0.07338321954011917,
-0.1382240504026413,
0.015589314512908459,
0.12989415228366852,
0.0596783384680748,
0.1300256997346878,
0.07426523417234421,
-0.06474962830543518,
0.027448566630482674,
0.07590167224407196,
-0.09445245563983917,
-0.12419097125530243,
-0.03002060204744339,
0.025104263797402382,
-0.13946689665317535,
0.06435635685920715,
0.09844792634248734,
-0.06154536083340645,
-0.009460304863750935,
0.007055831607431173,
-0.005226208362728357,
-0.057791534811258316,
0.2204383760690689,
0.04570847004652023,
0.08748295158147812,
-0.10602555423974991,
0.07307598739862442,
0.03475995734333992,
-0.13782329857349396,
0.013953858986496925,
0.06885261088609695,
-0.04605920612812042,
-0.011976619251072407,
0.009344340302050114,
0.0893707126379013,
-0.05268368870019913,
-0.0620669387280941,
-0.15069016814231873,
-0.13991618156433105,
0.08844584226608276,
0.1333930641412735,
0.05546136572957039,
0.03037344664335251,
-0.054962530732154846,
0.059285968542099,
-0.11770603060722351,
0.08495376259088516,
0.07358075678348541,
0.08168965578079224,
-0.15779179334640503,
0.1573401391506195,
0.011959518305957317,
0.02601158805191517,
0.001082317321561277,
-0.008420114405453205,
-0.09338659048080444,
0.024775173515081406,
-0.1342976689338684,
-0.04762788861989975,
-0.05142791569232941,
0.0026310926768928766,
0.008298384957015514,
-0.06451267004013062,
-0.08075074851512909,
0.02992560714483261,
-0.12527404725551605,
-0.046584248542785645,
0.014511500485241413,
0.04546618461608887,
-0.1249312236905098,
-0.010156498290598392,
0.049010079354047775,
-0.12385927140712738,
0.08366966992616653,
0.07194304466247559,
0.023717001080513,
0.05169839784502983,
-0.052177511155605316,
0.010846767574548721,
0.05162842944264412,
-0.010148127563297749,
0.03828534856438637,
-0.13541948795318604,
-0.006875729653984308,
-0.02215857431292534,
0.05571886524558067,
-0.0003027305647265166,
0.05133764073252678,
-0.12957073748111725,
-0.04286310821771622,
-0.016629831865429878,
-0.054744653403759,
-0.06443526595830917,
0.04517325758934021,
0.08894677460193634,
0.03705950081348419,
0.18152235448360443,
-0.07311119139194489,
0.020337140187621117,
-0.22058312594890594,
0.01070133876055479,
-0.026168690994381905,
-0.09796643257141113,
-0.07796621322631836,
-0.028882959857583046,
0.07419703155755997,
-0.0670676901936531,
0.09112503379583359,
-0.06768546998500824,
0.0636286735534668,
0.0439075231552124,
-0.05761638656258583,
0.026958636939525604,
0.04574819281697273,
0.2454788237810135,
0.058024000376462936,
-0.013910389505326748,
0.08180250227451324,
0.02215707115828991,
0.07136616110801697,
0.10840915888547897,
0.16571709513664246,
0.14713484048843384,
-0.008035112172365189,
0.11230288445949554,
0.06682708859443665,
-0.08181402832269669,
-0.16843606531620026,
0.07240087538957596,
-0.03776944428682327,
0.12818492949008942,
-0.002942534862086177,
0.20731917023658752,
0.12225812673568726,
-0.17639707028865814,
0.041360996663570404,
-0.027626007795333862,
-0.07751331478357315,
-0.09853797405958176,
-0.03407168388366699,
-0.06952609866857529,
-0.1879522055387497,
0.02386309951543808,
-0.10019493848085403,
0.04569961130619049,
0.0499090813100338,
0.026428621262311935,
0.012381397187709808,
0.1540805995464325,
0.04204383119940758,
0.016138488426804543,
0.09166231006383896,
0.0022799011785537004,
-0.03544513136148453,
-0.05338474363088608,
-0.0982859805226326,
0.03087848611176014,
-0.03441892936825752,
0.05066896602511406,
-0.059521015733480453,
-0.1259525865316391,
0.06421322375535965,
0.016159676015377045,
-0.11461999267339706,
0.024982992559671402,
0.005470600910484791,
0.08190874010324478,
0.035729724913835526,
0.017254065722227097,
-0.0004721263831015676,
-0.019553517922759056,
0.2379143238067627,
-0.11203157901763916,
-0.06405151635408401,
-0.1310209482908249,
0.25977739691734314,
0.01467808336019516,
-0.01971849799156189,
0.033267561346292496,
-0.07295237481594086,
-0.028249133378267288,
0.16956594586372375,
0.13393591344356537,
-0.010709038935601711,
-0.023080574348568916,
0.00258997012861073,
-0.01699604094028473,
-0.05568348616361618,
0.07656671851873398,
0.11821909248828888,
0.07214906066656113,
-0.06670068204402924,
-0.03915267810225487,
-0.037576667964458466,
-0.051187459379434586,
-0.008910520933568478,
0.09710872173309326,
0.020655706524848938,
-0.020823977887630463,
-0.0352361686527729,
0.09211451560258865,
-0.06177157908678055,
-0.10031464695930481,
0.06639812141656876,
-0.17139063775539398,
-0.17836950719356537,
-0.029927833005785942,
0.06768326461315155,
0.010723229497671127,
0.07085436582565308,
0.005936807487159967,
-0.033798664808273315,
0.08245527744293213,
0.0011328092077746987,
-0.06616675108671188,
-0.12274333089590073,
0.11334734410047531,
-0.07690369337797165,
0.19100430607795715,
-0.05888223275542259,
0.041437018662691116,
0.12965980172157288,
0.06472662836313248,
-0.08167003095149994,
0.035294052213430405,
0.06294091045856476,
-0.1363840103149414,
0.03777487203478813,
0.1757582426071167,
-0.031191831454634666,
0.11297404766082764,
0.023374641314148903,
-0.1424483358860016,
0.012658156454563141,
-0.08557455986738205,
-0.03353589400649071,
-0.06192026287317276,
-0.031164322048425674,
-0.04112519696354866,
0.12365196645259857,
0.21207210421562195,
-0.06261181086301804,
-0.0108278077095747,
-0.05374167114496231,
0.03492928668856621,
0.07297107577323914,
0.09020373970270157,
-0.046948060393333435,
-0.29203367233276367,
0.00806252658367157,
0.018276071175932884,
-0.01562916673719883,
-0.28167691826820374,
-0.09942390769720078,
0.04111960530281067,
-0.06293390691280365,
-0.037009112536907196,
0.08899466693401337,
0.09251882135868073,
0.049073852598667145,
-0.05244305729866028,
-0.060060326009988785,
-0.06361690908670425,
0.1796216070652008,
-0.18383800983428955,
-0.0629945918917656
] |
null | null | null |
# Configuration
`title`: _string_
Display title for the Space
`emoji`: _string_
Space emoji (emoji-only character allowed)
`colorFrom`: _string_
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
`colorTo`: _string_
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
`sdk`: _string_
Can be either `gradio` or `streamlit`
`sdk_version`: _string_
Only applicable for `streamlit` SDK.
See [doc](https://hf.co/docs/hub/spaces) for more info on supported versions.
`app_file`: _string_
Path to your main application file (which contains either `gradio` or `streamlit` Python code).
Path is relative to the root of the repository.
`pinned`: _boolean_
Whether the Space stays on top of your list.
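For concreteness, here is a minimal sketch of the keys documented above, filled with the values from this record's metadata JSON. The Python dict form is illustrative only; in an actual Space these values live as YAML front matter at the top of the repository's README.md.

```python
# Illustrative sketch: the documented configuration keys, using this record's values.
space_config = {
    "title": "First Order Motion Model",  # display title for the Space
    "emoji": "\U0001F422",                # a single emoji (the turtle, \ud83d\udc22)
    "colorFrom": "blue",                  # thumbnail gradient start color
    "colorTo": "yellow",                  # thumbnail gradient end color
    "sdk": "gradio",                      # either "gradio" or "streamlit"
    "app_file": "app.py",                 # path relative to the repository root
    "pinned": False,                      # whether the Space stays on top of your list
}

# Only these two SDK values are documented as valid.
assert space_config["sdk"] in ("gradio", "streamlit")
```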
|
{"title": "First Order Motion Model", "emoji": "\ud83d\udc22", "colorFrom": "blue", "colorTo": "yellow", "sdk": "gradio", "app_file": "app.py", "pinned": false}
| null |
hrushikute/DanceOnTune
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
# Configuration
'title': _string_
Display title for the Space
'emoji': _string_
Space emoji (emoji-only character allowed)
'colorFrom': _string_
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
'colorTo': _string_
Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
'sdk': _string_
Can be either 'gradio' or 'streamlit'
'sdk_version': _string_
Only applicable for 'streamlit' SDK.
See doc for more info on supported versions.
'app_file': _string_
Path to your main application file (which contains either 'gradio' or 'streamlit' Python code).
Path is relative to the root of the repository.
'pinned': _boolean_
Whether the Space stays on top of your list.
|
[
"# Configuration\n\n'title': _string_ \nDisplay title for the Space\n\n'emoji': _string_ \nSpace emoji (emoji-only character allowed)\n\n'colorFrom': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'colorTo': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'sdk': _string_ \nCan be either 'gradio' or 'streamlit'\n\n'sdk_version' : _string_ \nOnly applicable for 'streamlit' SDK. \nSee doc for more info on supported versions.\n\n'app_file': _string_ \nPath to your main application file (which contains either 'gradio' or 'streamlit' Python code). \nPath is relative to the root of the repository.\n\n'pinned': _boolean_ \nWhether the Space stays on top of your list."
] |
[
"TAGS\n#region-us \n",
"# Configuration\n\n'title': _string_ \nDisplay title for the Space\n\n'emoji': _string_ \nSpace emoji (emoji-only character allowed)\n\n'colorFrom': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'colorTo': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'sdk': _string_ \nCan be either 'gradio' or 'streamlit'\n\n'sdk_version' : _string_ \nOnly applicable for 'streamlit' SDK. \nSee doc for more info on supported versions.\n\n'app_file': _string_ \nPath to your main application file (which contains either 'gradio' or 'streamlit' Python code). \nPath is relative to the root of the repository.\n\n'pinned': _boolean_ \nWhether the Space stays on top of your list."
] |
[
6,
223
] |
[
"passage: TAGS\n#region-us \n# Configuration\n\n'title': _string_ \nDisplay title for the Space\n\n'emoji': _string_ \nSpace emoji (emoji-only character allowed)\n\n'colorFrom': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'colorTo': _string_ \nColor for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)\n\n'sdk': _string_ \nCan be either 'gradio' or 'streamlit'\n\n'sdk_version' : _string_ \nOnly applicable for 'streamlit' SDK. \nSee doc for more info on supported versions.\n\n'app_file': _string_ \nPath to your main application file (which contains either 'gradio' or 'streamlit' Python code). \nPath is relative to the root of the repository.\n\n'pinned': _boolean_ \nWhether the Space stays on top of your list."
] |
[
0.01188071072101593,
0.07758358120918274,
-0.005383076146245003,
-0.017373213544487953,
0.08687979727983475,
-0.014924844726920128,
0.014434631913900375,
0.06603410094976425,
0.10808582603931427,
0.1384299099445343,
0.02763586863875389,
0.09634341299533844,
0.009154659695923328,
0.1522364765405655,
0.006923646666109562,
-0.22283697128295898,
0.042200393974781036,
-0.0709344744682312,
0.04902972653508186,
0.06943602859973907,
0.06273981928825378,
-0.059291329234838486,
0.07202128320932388,
-0.0033088354393839836,
-0.14888131618499756,
0.00013695177040062845,
-0.010616403073072433,
-0.049460891634225845,
0.014828769490122795,
-0.0402386374771595,
0.08266407251358032,
-0.04675670340657234,
-0.07371697574853897,
-0.1268932968378067,
0.033357325941324234,
0.12697471678256989,
0.033907450735569,
0.002259220229461789,
0.12230198830366135,
-0.12981177866458893,
0.21807576715946198,
-0.1327449381351471,
0.05513199418783188,
-0.014647294767200947,
-0.018211161717772484,
-0.15117362141609192,
-0.0412733368575573,
-0.05453411117196083,
0.14670124650001526,
0.009025073610246181,
-0.008476284332573414,
0.01575237512588501,
-0.12312111258506775,
0.08234678208827972,
0.06933248043060303,
-0.04727475345134735,
-0.002634770004078746,
0.1116911768913269,
0.08801890909671783,
0.037870731204748154,
-0.12108538299798965,
0.0075067500583827496,
-0.03785065934062004,
-0.010196342132985592,
-0.03730084374547005,
-0.05277198180556297,
-0.10130736231803894,
0.02550225891172886,
-0.08296948671340942,
-0.0009194708545692265,
0.25919461250305176,
0.012544205412268639,
-0.05758257955312729,
-0.11951438337564468,
-0.06565803289413452,
-0.045700762420892715,
0.02577689290046692,
0.060137517750263214,
0.053701069205999374,
0.06539168953895569,
0.10311403125524521,
-0.0007050674175843596,
-0.12313074618577957,
-0.010358340106904507,
-0.12153321504592896,
0.15558311343193054,
-0.027887029573321342,
0.020990798249840736,
-0.11456135660409927,
0.08364246785640717,
-0.08822081238031387,
-0.13817736506462097,
0.014043902978301048,
-0.10895320028066635,
0.00031807392952032387,
0.06523092091083527,
-0.0740472599864006,
-0.17337968945503235,
0.0711340457201004,
0.1878124326467514,
0.055560074746608734,
0.0983857661485672,
-0.0769592747092247,
0.04823235049843788,
0.10609938204288483,
0.191655695438385,
-0.07476762682199478,
0.00942059326916933,
0.02892918698489666,
-0.1705435961484909,
0.09933915734291077,
-0.08998927474021912,
-0.1121046394109726,
0.018870292231440544,
-0.001934586907736957,
-0.000014767882021260448,
0.10403572767972946,
0.0064650182612240314,
-0.08259890973567963,
-0.06819719076156616,
0.11214480549097061,
-0.08646366745233536,
0.08762615919113159,
0.04516882076859474,
-0.03648534417152405,
0.05987478047609329,
-0.027545860037207603,
0.033786140382289886,
0.03255436569452286,
0.18013893067836761,
-0.04335426539182663,
-0.03640882298350334,
-0.1593049317598343,
-0.10343391448259354,
0.04724676534533501,
-0.09820203483104706,
0.05610208958387375,
-0.06877691298723221,
-0.045289840549230576,
-0.044973164796829224,
0.014098557643592358,
-0.000728745711967349,
0.05700746551156044,
0.05061240494251251,
-0.11077108979225159,
0.12225869297981262,
0.0473838746547699,
-0.010428318753838539,
-0.045939430594444275,
0.04395761713385582,
-0.014665937051177025,
0.08389930427074432,
-0.07425318658351898,
0.006531501188874245,
-0.08674965053796768,
0.030134806409478188,
-0.31573250889778137,
0.0038507028948515654,
-0.01876232586801052,
0.10097015649080276,
-0.001635766588151455,
-0.00516595458611846,
-0.03464873880147934,
-0.03676575794816017,
-0.05587775260210037,
0.046409010887145996,
-0.25948604941368103,
0.011800145730376244,
0.1486954241991043,
-0.00558213796466589,
-0.010950867086648941,
0.039532337337732315,
0.015502169728279114,
-0.19489143788814545,
0.0033431637566536665,
0.382671982049942,
0.12433561682701111,
-0.15602239966392517,
-0.038460731506347656,
0.002253052545711398,
-0.12435132265090942,
0.05122614651918411,
0.10650063306093216,
-0.028285304084420204,
0.06810537725687027,
0.06492038071155548,
-0.13770891726016998,
0.03194277361035347,
0.08137232065200806,
0.07254006713628769,
-0.06185802444815636,
0.03309902548789978,
0.12447807937860489,
-0.001426891190931201,
-0.09820882230997086,
-0.12830325961112976,
-0.044034551829099655,
0.05778408423066139,
0.12614697217941284,
0.010718805715441704,
-0.005161978770047426,
-0.08368363976478577,
0.15632237493991852,
0.046334970742464066,
-0.01825164072215557,
-0.11742157489061356,
-0.10392706096172333,
0.039694882929325104,
0.15260495245456696,
-0.04313033074140549,
0.00972882192581892,
0.02338254079222679,
0.0057436628267169,
0.06214966997504234,
-0.06808450818061829,
0.005501836538314819,
-0.0404035858809948,
0.07184121012687683,
-0.06626638770103455,
0.0597655288875103,
-0.04996529221534729,
-0.07479199767112732,
-0.054193515330553055,
-0.01938489079475403,
0.154617577791214,
0.16577643156051636,
0.08953801542520523,
-0.08143158257007599,
0.07282175868749619,
-0.0971396267414093,
-0.07091189175844193,
-0.05644267797470093,
-0.0658361092209816,
-0.014500929042696953,
0.10139623284339905,
0.12220189720392227,
-0.18264023959636688,
0.05132247507572174,
0.12537862360477448,
0.0016902342904359102,
0.06873060762882233,
0.0471314862370491,
0.000008568487828597426,
0.07041674107313156,
-0.03394358977675438,
-0.026566192507743835,
0.039171043783426285,
0.06057918071746826,
-0.004764103796333075,
-0.045834168791770935,
-0.03129440173506737,
0.0007111160084605217,
-0.08782535046339035,
-0.046953946352005005,
0.01650119572877884,
0.09927795827388763,
0.021815184503793716,
0.06723066419363022,
0.07200349122285843,
0.11227423697710037,
0.23267285525798798,
-0.01663021929562092,
-0.04860781878232956,
-0.05390782281756401,
-0.0038662166334688663,
-0.072230763733387,
0.05298231542110443,
-0.044093791395425797,
-0.013703000731766224,
0.06197461113333702,
0.02880055084824562,
-0.0241240244358778,
-0.07767774909734726,
-0.048432767391204834,
0.015537015162408352,
0.01294061541557312,
0.058635298162698746,
0.13916529715061188,
0.03883228823542595,
0.013341385871171951,
-0.03627052530646324,
0.03799179568886757,
-0.08247525244951248,
-0.06778591871261597,
-0.009052245877683163,
0.0850275531411171,
-0.2374516725540161,
-0.27590590715408325,
-0.06182103976607323,
-0.1899542510509491,
-0.05442836135625839,
0.10173444449901581,
0.06149669736623764,
-0.0981135293841362,
-0.06572254002094269,
-0.010261597111821175,
-0.017206426709890366,
-0.10618901252746582,
-0.030549757182598114,
-0.19057513773441315,
-0.00282036024145782,
-0.0530216358602047,
-0.08368751406669617,
-0.04087941721081734,
0.07227448374032974,
0.07781261950731277,
0.13454799354076385,
0.11990272998809814,
0.13019338250160217,
0.14590215682983398,
-0.04064978286623955,
-0.017342550680041313,
0.030587121844291687,
0.1189412996172905,
-0.11379950493574142,
0.09215140342712402,
0.15846885740756989,
0.04160125181078911,
0.10976667702198029,
0.17799557745456696,
-0.01809956505894661,
-0.08431658893823624,
0.09001435339450836,
0.03243125602602959,
0.002199815586209297,
-0.14897596836090088,
-0.09807609766721725,
-0.09747834503650665,
-0.024533651769161224,
-0.01050117053091526,
0.08103906363248825,
-0.02973933517932892,
0.00024748386931605637,
0.007691757287830114,
-0.031353335827589035,
-0.11500386148691177,
0.10164745151996613,
0.10783034563064575,
-0.051677361130714417,
0.06240885704755783,
-0.03621061518788338,
0.013111197389662266,
0.12872914969921112,
-0.005574287846684456,
0.05351933091878891,
0.0009217691840603948,
0.02266230620443821,
0.07706180214881897,
0.12979085743427277,
0.05972360819578171,
-0.06920131295919418,
-0.012724172323942184,
-0.018883759155869484,
-0.02852329984307289,
-0.04018702358007431,
-0.051866352558135986,
0.016110291704535484,
0.07158230245113373,
-0.07070600986480713,
0.007464050315320492,
-0.09313590079545975,
0.04392194375395775,
-0.013408638536930084,
0.040764421224594116,
-0.09933379292488098,
0.11283421516418457,
0.11212150752544403,
0.06982560455799103,
-0.20660647749900818,
-0.0049742055125534534,
0.1760960966348648,
-0.06036512181162834,
0.026148896664381027,
0.044760819524526596,
0.07438094913959503,
-0.013310940004885197,
-0.014131118543446064,
-0.011971941217780113,
0.04165353998541832,
0.008321966044604778,
0.10816291719675064,
-0.06053003668785095,
-0.06731956452131271,
-0.009565652348101139,
-0.01802060380578041,
0.0145226139575243,
-0.031234830617904663,
0.023711146786808968,
0.15951348841190338,
-0.0065234447829425335,
0.05780748277902603,
-0.17307095229625702,
-0.08096909523010254,
-0.054502543061971664,
-0.021976597607135773,
0.16028445959091187,
-0.09405897557735443,
0.021642018109560013,
-0.017742451280355453,
-0.03450736030936241,
-0.029039451852440834,
-0.06952325999736786,
-0.0375576987862587,
-0.08077406883239746,
0.022223329171538353,
0.004659013357013464,
0.04072054848074913,
-0.07474403083324432,
0.03581817448139191,
0.04575775936245918,
0.07663757354021072,
0.008526667021214962,
-0.027415957301855087,
-0.09199786931276321,
-0.1843370795249939,
0.06291311979293823,
-0.049490317702293396,
0.04931570217013359,
-0.0418127179145813,
0.1938740313053131,
0.06365559250116348,
-0.05853547528386116,
0.05742616578936577,
-0.03859667852520943,
0.03838387504220009,
-0.13857907056808472,
0.07207682728767395,
-0.07659886032342911,
-0.016371112316846848,
-0.007355353329330683,
0.11035165935754776,
-0.1062881276011467,
-0.15562652051448822,
0.06501084566116333,
0.16950535774230957,
0.10116761177778244,
0.000806302996352315,
-0.022203318774700165,
0.0725877434015274,
0.05569668486714363,
0.0068956539034843445,
0.06090042367577553,
0.1454852670431137,
-0.12914572656154633,
0.1224699541926384,
-0.022552968934178352,
-0.016733380034565926,
-0.13407345116138458,
0.03897436335682869,
-0.02403831109404564,
0.05824385955929756,
0.03790315240621567,
-0.18016976118087769,
0.08470097184181213,
-0.03233107551932335,
0.01641049236059189,
0.23477694392204285,
-0.18917188048362732,
-0.06158251315355301,
0.05588087812066078,
0.024497007951140404,
-0.04277771711349487,
-0.11648520082235336,
-0.09813736379146576,
-0.029879916459321976,
-0.05052601546049118,
0.11488782614469528,
-0.04573405534029007,
0.04909282177686691,
-0.031638309359550476,
0.1112939864397049,
0.047230225056409836,
-0.045135047286748886,
0.1427699476480484,
-0.14035338163375854,
0.0977340042591095,
-0.12127474695444107,
0.019972048699855804,
0.09111752361059189,
-0.07159112393856049,
0.10494855046272278,
-0.06665443629026413,
0.06383790075778961,
-0.2465050369501114,
0.0022746575996279716,
-0.008453061804175377,
0.036550372838974,
0.0271987933665514,
-0.05680028721690178,
-0.12901988625526428,
-0.03715227171778679,
-0.027381405234336853,
-0.01813647337257862,
-0.11713598668575287,
-0.0031143249943852425,
-0.14133024215698242,
-0.057005152106285095,
-0.08749523013830185,
0.02312685362994671,
-0.21061889827251434,
-0.004185882862657309,
0.008126300759613514,
0.020437484607100487,
-0.17857614159584045,
-0.04297766089439392,
0.03774886205792427,
0.004375527147203684,
0.08832122385501862,
-0.023119816556572914,
-0.04940380901098251,
0.014956346713006496,
0.13044969737529755,
-0.12233772873878479,
-0.0006630075513385236,
-0.03437092527747154,
0.14777947962284088,
-0.011739841662347317,
-0.11739847809076309,
0.0032358316238969564,
0.07741342484951019,
-0.032717783004045486,
-0.003090545302256942,
0.04521242901682854,
0.08729007840156555,
-0.016245700418949127,
0.05914886295795441,
0.0005832412862218916,
-0.07385926693677902,
0.01753399521112442,
0.07166670262813568,
-0.03103218413889408,
0.0253831148147583,
0.04003673419356346,
-0.0640636757016182,
-0.036020029336214066,
0.11157411336898804,
0.08526992052793503,
0.14425128698349,
0.0024558203294873238,
0.07784372568130493,
-0.022310519590973854,
0.00008107958274194971,
0.007436560466885567,
0.06408929824829102,
0.039241448044776917,
-0.06688732653856277,
-0.048639725893735886,
0.0198516845703125,
0.10516323149204254,
-0.026003355160355568,
0.057434793561697006,
-0.12909218668937683,
-0.08399637043476105,
0.037286579608917236,
-0.0039027638267725706,
-0.01567487232387066,
-0.08358988910913467,
-0.04786192625761032,
-0.05242536962032318,
-0.04371176287531853,
0.0586145780980587,
0.1470838487148285,
-0.0020743575878441334,
0.005813091527670622,
-0.01973113976418972,
-0.04642482101917267,
-0.025618139654397964,
-0.055881984531879425,
-0.07332699000835419,
-0.048941005021333694,
0.06819722801446915,
-0.11012738198041916,
-0.07231386750936508,
0.16018956899642944,
-0.03682415932416916,
-0.03815343603491783,
0.024433957412838936,
0.01551902573555708,
-0.00039856525836512446,
-0.14088329672813416,
-0.10607647895812988,
0.14317864179611206,
0.029827237129211426,
0.012417588382959366,
-0.15009038150310516,
0.02611035853624344,
-0.01723400130867958,
0.014754951931536198,
-0.06131420657038689,
0.018221061676740646,
-0.1636582612991333,
-0.01831182837486267,
-0.043449562042951584,
-0.1960551142692566,
-0.07952366024255753,
-0.02105804532766342,
-0.006272517144680023,
0.11141568422317505,
0.16052664816379547,
0.051811136305332184,
0.006949711591005325,
0.005004440434277058,
-0.02623261883854866,
-0.006475407164543867,
-0.013523302972316742,
0.06349780410528183,
0.039204467087984085,
0.0024625249207019806,
-0.02758321352303028,
0.0603170283138752,
0.10930006951093674,
-0.12960131466388702,
-0.036593466997146606,
0.15204331278800964,
-0.012650004588067532,
0.031441085040569305,
0.1599513441324234,
-0.011068953201174736,
0.02534940093755722,
0.08766882121562958,
0.05402905493974686,
0.06659457087516785,
-0.011877855286002159,
0.022817092016339302,
0.12724968791007996,
0.02715393155813217,
-0.05997871980071068,
-0.08889736980199814,
-0.01388144213706255,
-0.2833007872104645,
-0.07705571502447128,
-0.03963233903050423,
0.08437919616699219,
-0.036246880888938904,
0.26679450273513794,
0.12403151392936707,
-0.1266176998615265,
0.0516984798014164,
0.0333801694214344,
-0.04046184569597244,
-0.0784478411078453,
-0.15067048370838165,
-0.02651614509522915,
-0.1089402586221695,
0.002701305551454425,
-0.10374482721090317,
0.08816846460103989,
-0.03821096569299698,
-0.0022927771788090467,
-0.025286901742219925,
0.08528466522693634,
-0.04238740727305412,
-0.14017419517040253,
-0.0010722818551585078,
-0.005889455787837505,
-0.05190213397145271,
0.08433298766613007,
0.07148399204015732,
-0.01335175707936287,
-0.02401786483824253,
0.07422652095556259,
0.024782950058579445,
-0.004932098090648651,
0.009790784679353237,
-0.14268329739570618,
-0.02815517596900463,
0.03501781448721886,
0.007378872949630022,
-0.05755603685975075,
0.06268104165792465,
0.07430897653102875,
-0.02182610146701336,
-0.0035758463200181723,
0.3859916031360626,
-0.002008322160691023,
-0.00420699967071414,
0.021913999691605568,
-0.17859221994876862,
0.0202576145529747,
0.050627049058675766,
-0.06285877525806427,
-0.1814487725496292,
-0.12856777012348175,
0.1440022885799408,
0.010789863765239716,
0.04751095920801163,
0.00413023354485631,
0.0036507844924926758,
0.01860237494111061,
0.024117661640048027,
0.12237895280122757,
0.05381025746464729,
0.20935729146003723,
0.02940075471997261,
0.037343814969062805,
-0.013649571686983109,
-0.039105575531721115,
-0.1428230255842209,
-0.08808896690607071,
-0.041783563792705536,
-0.10876533389091492,
-0.06712602078914642,
0.07452433556318283,
0.03308483213186264,
0.08906224370002747,
-0.017640581354498863,
0.012726355344057083,
-0.047113630920648575,
0.05965685844421387,
0.17941376566886902,
-0.03781856968998909,
0.06464013457298279,
0.0029464554972946644,
-0.07043617963790894,
0.08819912374019623,
0.0008902386180125177,
-0.12900254130363464,
-0.004896699916571379,
0.0014196228003129363,
-0.07022278755903244,
0.15389502048492432,
-0.02325236052274704,
0.005016704089939594,
0.06732019782066345,
0.04813087359070778,
-0.08010302484035492,
0.059342797845602036,
0.0013369007501751184,
-0.08320751786231995,
0.03120153211057186,
0.17014345526695251,
0.010565650649368763,
-0.07222873717546463,
0.12183177471160889,
-0.01722615398466587,
0.013417752459645271,
-0.08210001140832901,
0.16389773786067963,
-0.1405809074640274,
0.08207181096076965,
-0.16919106245040894,
0.029745303094387054,
0.022214224562048912,
0.012673028744757175,
-0.008893320336937904,
-0.06229591369628906,
-0.008654721081256866,
-0.01413356140255928,
-0.040051933377981186,
-0.010774712078273296,
0.05027569457888603,
-0.026332490146160126,
0.2655848264694214,
0.013165590353310108,
-0.08682727068662643,
-0.07146721333265305,
-0.027951331809163094,
0.04633788391947746,
-0.0429544635117054,
0.09822317212820053,
-0.024975966662168503,
0.008648942224681377,
-0.08020338416099548,
-0.11325552314519882,
0.0160344447940588,
0.07119356095790863,
-0.12669649720191956,
-0.05326187238097191
] |
null | null |
transformers
|
# Rick and Morty DialoGPT Model
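
A minimal usage sketch (not part of the original card): DialoGPT checkpoints load like any GPT-2 causal LM, and a reply is generated by appending the EOS token to the user turn. The prompt text below is an arbitrary example.

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("hrv/DialoGPT-small-rick-morty")
model = AutoModelForCausalLM.from_pretrained("hrv/DialoGPT-small-rick-morty")

# Encode one user turn terminated by EOS, then let the model continue the dialogue.
input_ids = tokenizer.encode("Morty, where are we?" + tokenizer.eos_token, return_tensors="pt")
reply_ids = model.generate(input_ids, max_length=100, pad_token_id=tokenizer.eos_token_id)

# Decode only the newly generated tokens (the model's reply).
print(tokenizer.decode(reply_ids[:, input_ids.shape[-1]:][0], skip_special_tokens=True))
```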
|
{"tags": ["conversational"]}
|
text-generation
|
hrv/DialoGPT-small-rick-morty
|
[
"transformers",
"pytorch",
"gpt2",
"text-generation",
"conversational",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
# Rick and Morty DialoGPT Model
|
[
"# Rick and Morty DialoGPT Model"
] |
[
"TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n",
"# Rick and Morty DialoGPT Model"
] |
[
51,
10
] |
[
"passage: TAGS\n#transformers #pytorch #gpt2 #text-generation #conversational #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us \n# Rick and Morty DialoGPT Model"
] |
[
-0.01990443281829357,
0.10367733240127563,
-0.006012056488543749,
0.013662099838256836,
0.1287931650876999,
0.004103946499526501,
0.13405320048332214,
0.13470496237277985,
-0.029608309268951416,
-0.0377325713634491,
0.1409052610397339,
0.2081032246351242,
-0.009616929106414318,
0.025026321411132812,
-0.08027864247560501,
-0.33285143971443176,
0.04419311136007309,
0.04611847549676895,
-0.04805411398410797,
0.11171722412109375,
0.09962809830904007,
-0.03511058911681175,
0.07650627940893173,
0.012189619243144989,
-0.11959464848041534,
0.014523470774292946,
0.01571112684905529,
-0.09889741986989975,
0.11399844288825989,
0.07783890515565872,
0.031239205971360207,
0.033389654010534286,
-0.042143791913986206,
-0.13308840990066528,
0.04855761677026749,
-0.0014628645731136203,
-0.03996938467025757,
0.06519230455160141,
0.0068825362250208855,
-0.09896008670330048,
0.13105708360671997,
0.11774895340204239,
-0.001342291128821671,
0.030811335891485214,
-0.1546017825603485,
-0.03095608949661255,
-0.013916928321123123,
0.04583658277988434,
0.05571185424923897,
0.1092928797006607,
-0.03970988467335701,
0.11546611040830612,
-0.046847838908433914,
0.11656361073255539,
0.13404695689678192,
-0.27711591124534607,
-0.013774634338915348,
0.14150507748126984,
0.03755388408899307,
0.031246060505509377,
-0.03764049708843231,
0.09234841167926788,
0.010574371553957462,
-0.009135077707469463,
-0.054559025913476944,
-0.07839421927928925,
-0.06956472247838974,
0.03881034255027771,
-0.08538595587015152,
-0.0028573249001055956,
0.22309143841266632,
-0.029777048155665398,
0.0931403860449791,
-0.061110686510801315,
-0.083645299077034,
0.0022445949725806713,
-0.04396601766347885,
-0.031562261283397675,
-0.0995510146021843,
0.08443354815244675,
-0.04024428874254227,
-0.08693728595972061,
-0.10731299221515656,
-0.022938303649425507,
-0.15873323380947113,
0.16214832663536072,
0.03501884266734123,
0.03956814110279083,
-0.21219894289970398,
0.07603893429040909,
-0.04213596507906914,
-0.10128775984048843,
0.025763655081391335,
-0.0809730738401413,
0.0031352867372334003,
0.01420458871871233,
-0.034850042313337326,
-0.01257789321243763,
0.09354974329471588,
0.11913833022117615,
-0.002085368847474456,
0.028482265770435333,
-0.03459439426660538,
0.04555915296077728,
0.04445279389619827,
0.04635937884449959,
-0.030874032527208328,
-0.005519113503396511,
0.024999095126986504,
-0.0903957337141037,
-0.010871811769902706,
-0.060442280024290085,
-0.1946737915277481,
0.013364237733185291,
0.05735969915986061,
0.055262304842472076,
0.030765585601329803,
0.13551434874534607,
0.0010974886827170849,
-0.0475224107503891,
0.03023342229425907,
-0.020769428461790085,
-0.016528211534023285,
0.029149476438760757,
-0.0072809201665222645,
0.1526104062795639,
0.022983204573392868,
0.05690442770719528,
-0.11451500654220581,
0.012773441150784492,
-0.03330712020397186,
-0.006917042192071676,
-0.03216493874788284,
-0.061537809669971466,
0.003289242973551154,
0.0014469954185187817,
0.013694697991013527,
-0.12761977314949036,
-0.15719962120056152,
-0.003717299085110426,
0.00613630935549736,
-0.05369097366929054,
-0.10004933178424835,
-0.10542158782482147,
-0.03153182193636894,
0.046352777630090714,
-0.053748197853565216,
0.03198752924799919,
-0.039340607821941376,
0.09383489936590195,
-0.03441528603434563,
0.0691300630569458,
-0.0863635316491127,
0.0905333161354065,
-0.06098577380180359,
-0.04111234471201897,
-0.0643690675497055,
0.12356391549110413,
0.011561519466340542,
0.04442533850669861,
-0.03781363368034363,
-0.01636880449950695,
-0.11087207496166229,
0.06495212018489838,
-0.03516015037894249,
0.22487092018127441,
-0.08996163308620453,
-0.09683383256196976,
0.22284504771232605,
-0.04562665522098541,
-0.12769415974617004,
0.12243670970201492,
-0.03600937873125076,
0.09682484716176987,
0.11536505818367004,
0.16257616877555847,
0.03866875544190407,
-0.0002237519365735352,
0.10846788436174393,
0.10610917955636978,
-0.07603283226490021,
0.006744202226400375,
0.0250004380941391,
-0.02382737584412098,
-0.09139634668827057,
0.015165179036557674,
0.07776524871587753,
0.04803644120693207,
-0.05478836968541145,
-0.015317765064537525,
0.015090391971170902,
-0.003627530997619033,
0.06564177572727203,
-0.017049036920070648,
0.11691898107528687,
-0.03955721855163574,
-0.07620245963335037,
-0.014626736752688885,
0.028113901615142822,
-0.06986767798662186,
0.026787258684635162,
-0.07962338626384735,
0.02948051132261753,
-0.01967560686171055,
0.06687499582767487,
-0.16950036585330963,
-0.09430424869060516,
-0.06010226905345917,
0.23349159955978394,
0.07496993243694305,
0.11698364466428757,
0.06350064277648926,
-0.056928664445877075,
0.0006459777359850705,
0.037900060415267944,
0.19767099618911743,
-0.006904584355652332,
-0.07503941655158997,
-0.11777795851230621,
0.10312607139348984,
-0.07375676929950714,
0.06138577312231064,
-0.0416308231651783,
0.007855354808270931,
0.019795136526226997,
0.11127804219722748,
-0.04220014438033104,
0.039965033531188965,
0.012499134056270123,
-0.03696384280920029,
-0.05908297002315521,
0.0004571304307319224,
0.09440597146749496,
-0.0005542659782804549,
-0.10514124482870102,
0.2379530370235443,
-0.21215155720710754,
0.12180843949317932,
0.1799643337726593,
-0.2256188690662384,
0.008836638182401657,
-0.10462760180234909,
-0.016665222123265266,
0.01030759233981371,
0.03996801748871803,
-0.040312353521585464,
0.24249082803726196,
-0.014560520648956299,
0.17035135626792908,
-0.04880015179514885,
-0.05010494217276573,
-0.0440804697573185,
-0.05291803553700447,
0.0003277618088759482,
0.12486644089221954,
0.09157522767782211,
-0.18372175097465515,
0.17465431988239288,
0.06325390189886093,
0.03004654310643673,
0.1566917598247528,
0.022896459326148033,
0.020663797855377197,
0.05599488690495491,
-0.0012882096925750375,
-0.03033529780805111,
-0.07880529016256332,
-0.20945574343204498,
-0.012111871503293514,
0.07547834515571594,
0.04618273675441742,
0.10363037884235382,
-0.1018955409526825,
-0.030724551528692245,
-0.006948297843337059,
-0.030821966007351875,
0.03848150745034218,
0.13554143905639648,
0.015318007208406925,
0.12024796009063721,
-0.019162237644195557,
-0.06668011844158173,
0.0741129145026207,
0.01461794413626194,
-0.09263674914836884,
0.18050695955753326,
-0.1221487745642662,
-0.3382752537727356,
-0.10329627990722656,
-0.20327065885066986,
-0.04040617123246193,
0.0422586165368557,
0.11002974957227707,
-0.1460546851158142,
-0.029720865190029144,
0.0010455691954120994,
0.08435780555009842,
-0.1366978883743286,
0.006720550823956728,
-0.017843635752797127,
-0.01294276025146246,
-0.1374056041240692,
-0.09384968876838684,
-0.04747654125094414,
-0.060003772377967834,
-0.03218422830104828,
0.10381519794464111,
-0.1596987098455429,
0.007801016326993704,
0.230968177318573,
0.04797196388244629,
0.07053504139184952,
-0.036995481699705124,
0.17910921573638916,
-0.08220451325178146,
0.016473548486828804,
0.24478016793727875,
-0.05610832944512367,
0.0740312784910202,
0.10560029745101929,
-0.005553957540541887,
-0.052998270839452744,
0.03756273165345192,
0.00788428820669651,
-0.0785532221198082,
-0.21784749627113342,
-0.1030275970697403,
-0.11046822369098663,
0.04284128174185753,
0.05120398849248886,
0.04543844982981682,
0.1585974246263504,
0.06446543335914612,
-0.05187172442674637,
-0.011306295171380043,
0.08315242826938629,
0.08576013147830963,
0.24794787168502808,
-0.06311704963445663,
0.1473274976015091,
-0.020790869370102882,
-0.16434483230113983,
0.07334780693054199,
0.06416254490613937,
0.07227631658315659,
0.06913222372531891,
0.11215730756521225,
0.0020037174690514803,
0.017364054918289185,
0.12614323198795319,
0.05889604985713959,
-0.011050567030906677,
-0.031410302966833115,
-0.04586650803685188,
-0.04347039759159088,
-0.020151739940047264,
0.041160233318805695,
0.05188119783997536,
-0.1600257307291031,
-0.02415069006383419,
0.022831739857792854,
0.046689603477716446,
-0.003216250566765666,
0.08608495444059372,
-0.19217506051063538,
-0.018159521743655205,
0.06477150321006775,
-0.0016290671192109585,
-0.09313707798719406,
0.08108778297901154,
-0.009849769994616508,
-0.09697907418012619,
0.03780587762594223,
-0.03585495799779892,
0.1301390826702118,
-0.0750122219324112,
0.07286842167377472,
-0.1119815781712532,
-0.02080838568508625,
-0.0087605444714427,
0.11860883235931396,
-0.3024371266365051,
0.1707288920879364,
-0.0030656929593533278,
-0.04842326417565346,
-0.11293680220842361,
-0.015061003156006336,
0.03821004554629326,
0.08916047215461731,
0.10371578484773636,
-0.030773809179663658,
-0.06436607241630554,
0.0791664570569992,
-0.050910793244838715,
0.03525971621274948,
0.10187692940235138,
-0.04662879928946495,
-0.014911266043782234,
-0.05685164034366608,
0.0027524156030267477,
0.02270045317709446,
-0.10804066807031631,
0.014929873868823051,
-0.19113284349441528,
0.07794220000505447,
0.0811065286397934,
0.0722472071647644,
0.04095001146197319,
-0.029467018321156502,
-0.1261810064315796,
0.2744207978248596,
0.007417048793286085,
-0.09985779225826263,
-0.11269644647836685,
0.04465123638510704,
0.05646880716085434,
-0.07145541161298752,
-0.028514720499515533,
-0.07924950867891312,
0.052012015134096146,
-0.07113154232501984,
-0.1981293261051178,
0.11338871717453003,
-0.09873685240745544,
-0.04736494645476341,
-0.03962721675634384,
0.2276533544063568,
-0.027753405272960663,
0.02130931057035923,
0.0393831804394722,
-0.001616212772205472,
-0.12734149396419525,
-0.09492160379886627,
0.004517016001045704,
-0.0013660878175869584,
0.02586340345442295,
0.022777099162340164,
-0.04388801380991936,
0.0049570053815841675,
-0.06949588656425476,
-0.0037953434512019157,
0.3158918023109436,
0.10998717695474625,
-0.04474896565079689,
0.1561327874660492,
0.10242960602045059,
-0.06360200047492981,
-0.28859275579452515,
-0.11298105865716934,
-0.07240703701972961,
-0.05466444417834282,
-0.0838940367102623,
-0.18133240938186646,
0.08497140556573868,
-0.042584747076034546,
-0.00881777424365282,
0.042027126997709274,
-0.2644155025482178,
-0.09412363916635513,
0.18815293908119202,
-0.01533579919487238,
0.4300551414489746,
-0.11307147145271301,
-0.07450833916664124,
-0.05387028306722641,
-0.13561248779296875,
0.18766070902347565,
-0.018648525699973106,
0.0966244488954544,
0.00443116994574666,
0.20654869079589844,
0.05815155804157257,
-0.0008219819865189493,
0.0747876986861229,
0.011587066575884819,
-0.0452013723552227,
-0.09014920890331268,
-0.09217863529920578,
-0.020688166841864586,
0.005974666681140661,
0.034957773983478546,
-0.0941787138581276,
0.05258546397089958,
-0.11336535215377808,
-0.05589618906378746,
-0.07209338247776031,
0.026715638116002083,
0.02418643794953823,
-0.06410122662782669,
-0.006407043896615505,
-0.048794936388731,
-0.0010418962920084596,
0.00979152973741293,
0.21295785903930664,
-0.11305148899555206,
0.12096642702817917,
0.04414689913392067,
0.1508360654115677,
-0.08366664499044418,
-0.03614836558699608,
-0.04910365119576454,
-0.05565084517002106,
0.0676501989364624,
-0.1319035291671753,
0.04462771117687225,
0.10053624957799911,
-0.030742639675736427,
0.0898696631193161,
0.11227817088365555,
-0.02972952462732792,
0.0016581144882366061,
0.07279330492019653,
-0.23832836747169495,
-0.08509121090173721,
-0.07718803733587265,
0.05435929819941521,
0.057659514248371124,
0.09007556736469269,
0.21964938938617706,
0.011087107472121716,
-0.023847850039601326,
0.027587326243519783,
0.029717741534113884,
-0.01658647321164608,
0.05797221511602402,
0.008770608343183994,
0.031205764040350914,
-0.14632299542427063,
0.04562913626432419,
-0.010501107200980186,
-0.07197817414999008,
0.03429242596030235,
0.16717956960201263,
-0.10209374874830246,
-0.12234743684530258,
-0.04288604483008385,
0.17517046630382538,
-0.13247300684452057,
-0.017495078966021538,
-0.05478521063923836,
-0.1241658553481102,
0.07977617532014847,
0.11423204839229584,
0.05072414129972458,
0.042339734733104706,
-0.09691346436738968,
-0.03881148621439934,
-0.05552472919225693,
0.01957569271326065,
0.018891409039497375,
-0.030404040589928627,
-0.037885911762714386,
0.025801094248890877,
-0.04172535613179207,
0.11203933507204056,
-0.087384894490242,
-0.09792038798332214,
-0.16838693618774414,
0.03925701230764389,
-0.049022991210222244,
-0.07899222522974014,
-0.09344983100891113,
-0.03523614630103111,
0.014231358654797077,
-0.03348008170723915,
-0.018664700910449028,
-0.02225758694112301,
-0.0958842933177948,
0.03419994190335274,
-0.048781368881464005,
-0.005008503329008818,
-0.08496184647083282,
0.017331385985016823,
0.04781922325491905,
-0.023604100570082664,
0.1431105136871338,
0.12453559041023254,
-0.11789791285991669,
0.10031480342149734,
-0.16611437499523163,
-0.06820093840360641,
0.09455996751785278,
0.02471991442143917,
0.043245621025562286,
0.028927266597747803,
0.005174829158931971,
0.04808570072054863,
0.05950818210840225,
0.03694291412830353,
0.041101954877376556,
-0.07111897319555283,
0.061451081186532974,
-0.06278520077466965,
-0.11226452142000198,
-0.04257739707827568,
-0.005422866903245449,
0.00011432790051912889,
0.07346735894680023,
0.11052975058555603,
-0.05098198726773262,
0.09580544382333755,
-0.050767768174409866,
0.046003878116607666,
0.0289035402238369,
-0.16526201367378235,
0.008764104917645454,
-0.08482556790113449,
0.05248309671878815,
0.0030253108125180006,
0.15688744187355042,
0.028536081314086914,
-0.03175791725516319,
0.02630779519677162,
0.05105529725551605,
0.06318540126085281,
-0.00840448122471571,
0.19050461053848267,
0.09726009517908096,
-0.04487645998597145,
-0.09418396651744843,
0.08849480748176575,
0.05022666975855827,
0.05143674090504646,
0.1403687596321106,
-0.020687401294708252,
0.012512898072600365,
0.07724163681268692,
0.014415515586733818,
0.017872430384159088,
-0.07756411284208298,
-0.09487451612949371,
-0.011494439095258713,
0.025514457374811172,
-0.02882363088428974,
0.1138797178864479,
0.16729387640953064,
-0.0008394720498472452,
0.013234704732894897,
-0.01801590994000435,
-0.05735309422016144,
-0.20129387080669403,
-0.1959676295518875,
-0.09400797635316849,
-0.13690303266048431,
-0.0009418319095857441,
-0.13835963606834412,
0.03616710752248764,
0.042394787073135376,
0.09917435795068741,
-0.039446551352739334,
0.019261397421360016,
0.026794444769620895,
-0.10323353111743927,
0.039175424724817276,
-0.04838612675666809,
0.09421038627624512,
-0.007761404849588871,
0.005773975048214197,
-0.046786144375801086,
0.02436385303735733,
0.02127891033887863,
0.038409680128097534,
-0.012736459262669086,
0.024856114760041237,
-0.11602245271205902,
-0.09478921443223953,
-0.058010075241327286,
0.0558818019926548,
0.0046934462152421474,
0.18179026246070862,
0.02449701726436615,
-0.03384847193956375,
0.0275272186845541,
0.19317778944969177,
-0.06196035072207451,
-0.09709009528160095,
-0.08241496980190277,
0.2182236760854721,
-0.018931716680526733,
0.09253086894750595,
-0.035876765847206116,
0.012440751306712627,
-0.07121489197015762,
0.33243879675865173,
0.29320472478866577,
-0.10524016618728638,
0.010426074266433716,
-0.0019151283195242286,
0.0405552051961422,
0.1290767937898636,
0.07575080543756485,
0.11663594841957092,
0.256552129983902,
-0.06501701474189758,
-0.057690393179655075,
-0.014668738469481468,
-0.027142031118273735,
-0.06502988189458847,
0.04214107245206833,
0.04939494654536247,
-0.07117093354463577,
-0.00912293791770935,
0.12242040783166885,
-0.24606983363628387,
0.04577518254518509,
-0.13518153131008148,
-0.14807558059692383,
-0.0726354643702507,
0.002261551097035408,
0.09914402663707733,
0.010166509076952934,
0.08546656370162964,
-0.014570544473826885,
-0.0710548534989357,
0.03896206244826317,
0.021210450679063797,
-0.2144380509853363,
0.021960165351629257,
0.07259857654571533,
-0.028754761442542076,
-0.07154250144958496,
-0.013138728216290474,
0.08338925242424011,
0.09720319509506226,
0.03173141926527023,
-0.009079075418412685,
0.04570826143026352,
-0.0000614441087236628,
-0.06747788935899734,
0.035688117146492004,
0.022403022274374962,
0.01331246830523014,
-0.05491582676768303,
0.07895619422197342,
-0.17176033556461334,
0.020258452743291855,
-0.03599786013364792,
-0.06506339460611343,
-0.006352625321596861,
0.02872123196721077,
-0.06236473098397255,
0.0810769721865654,
0.08681372553110123,
-0.010693355463445187,
-0.015406738966703415,
-0.019259916618466377,
-0.012411676347255707,
-0.028850549831986427,
-0.07069326192140579,
-0.09390060603618622,
-0.15529757738113403,
-0.12466321885585785,
0.08110006153583527,
-0.008061634376645088,
-0.2096063792705536,
0.012769150547683239,
-0.13104628026485443,
0.04622570425271988,
-0.10809949785470963,
0.09371429681777954,
0.08394473046064377,
0.020185640081763268,
-0.007141938898712397,
0.003890183288604021,
0.036074474453926086,
0.07894916087388992,
-0.13067346811294556,
-0.08049263805150986
] |
null | null |
transformers
|
<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->
# wav2vec2-base-timit-demo-colab
This model is a fine-tuned version of [facebook/wav2vec2-base](https://huggingface.co/facebook/wav2vec2-base) on an unspecified (None) dataset.
It achieves the following results on the evaluation set:
- Loss: 0.4125
- Wer: 0.3607
## Model description
More information needed
## Intended uses & limitations
More information needed
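
One presumable intended use is plain speech-to-text inference. A minimal sketch, assuming a 16 kHz mono audio file (the path below is a placeholder):

```python
from transformers import pipeline

# "sample.wav" is a placeholder path; wav2vec2-base expects 16 kHz mono audio.
asr = pipeline("automatic-speech-recognition", model="hs788/wav2vec2-base-timit-demo-colab")
print(asr("sample.wav")["text"])
```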
## Training and evaluation data
More information needed
## Training procedure
### Training hyperparameters
The following hyperparameters were used during training (sketched as `TrainingArguments` below the list):
- learning_rate: 0.0001
- train_batch_size: 64
- eval_batch_size: 8
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- lr_scheduler_warmup_steps: 1000
- num_epochs: 30
- mixed_precision_training: Native AMP
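
A sketch of how these values map onto `TrainingArguments` from `transformers`; `output_dir` is illustrative, the Adam betas/epsilon listed above are the library defaults, and Native AMP corresponds to `fp16=True`:

```python
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="wav2vec2-base-timit-demo-colab",  # illustrative output path
    learning_rate=1e-4,
    per_device_train_batch_size=64,
    per_device_eval_batch_size=8,
    seed=42,
    lr_scheduler_type="linear",
    warmup_steps=1000,
    num_train_epochs=30,
    fp16=True,  # Native AMP mixed-precision training
    # Adam with betas=(0.9, 0.999) and epsilon=1e-08 is the optimizer default.
)
```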
### Training results
| Training Loss | Epoch | Step | Validation Loss | Wer |
|:-------------:|:-----:|:----:|:---------------:|:------:|
| 3.2018 | 7.94 | 500 | 1.3144 | 0.8508 |
| 0.4671 | 15.87 | 1000 | 0.4737 | 0.4160 |
| 0.1375 | 23.81 | 1500 | 0.4125 | 0.3607 |
### Framework versions
- Transformers 4.11.3
- Pytorch 1.10.0+cu111
- Datasets 1.13.3
- Tokenizers 0.10.3
|
{"license": "apache-2.0", "tags": ["generated_from_trainer"], "model-index": [{"name": "wav2vec2-base-timit-demo-colab", "results": []}]}
|
automatic-speech-recognition
|
hs788/wav2vec2-base-timit-demo-colab
|
[
"transformers",
"pytorch",
"tensorboard",
"wav2vec2",
"automatic-speech-recognition",
"generated_from_trainer",
"license:apache-2.0",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us
|
wav2vec2-base-timit-demo-colab
==============================
This model is a fine-tuned version of facebook/wav2vec2-base on an unspecified (None) dataset.
It achieves the following results on the evaluation set:
* Loss: 0.4125
* Wer: 0.3607
Model description
-----------------
More information needed
Intended uses & limitations
---------------------------
More information needed
Training and evaluation data
----------------------------
More information needed
Training procedure
------------------
### Training hyperparameters
The following hyperparameters were used during training:
* learning\_rate: 0.0001
* train\_batch\_size: 64
* eval\_batch\_size: 8
* seed: 42
* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
* lr\_scheduler\_type: linear
* lr\_scheduler\_warmup\_steps: 1000
* num\_epochs: 30
* mixed\_precision\_training: Native AMP
### Training results
### Framework versions
* Transformers 4.11.3
* Pytorch 1.10.0+cu111
* Datasets 1.13.3
* Tokenizers 0.10.3
|
[
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
"TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n",
"### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP",
"### Training results",
"### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
56,
130,
4,
33
] |
[
"passage: TAGS\n#transformers #pytorch #tensorboard #wav2vec2 #automatic-speech-recognition #generated_from_trainer #license-apache-2.0 #endpoints_compatible #region-us \n### Training hyperparameters\n\n\nThe following hyperparameters were used during training:\n\n\n* learning\\_rate: 0.0001\n* train\\_batch\\_size: 64\n* eval\\_batch\\_size: 8\n* seed: 42\n* optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08\n* lr\\_scheduler\\_type: linear\n* lr\\_scheduler\\_warmup\\_steps: 1000\n* num\\_epochs: 30\n* mixed\\_precision\\_training: Native AMP### Training results### Framework versions\n\n\n* Transformers 4.11.3\n* Pytorch 1.10.0+cu111\n* Datasets 1.13.3\n* Tokenizers 0.10.3"
] |
[
-0.10840193927288055,
0.09942979365587234,
-0.0032732731197029352,
0.0639089047908783,
0.10887520760297775,
-0.019640479236841202,
0.12833033502101898,
0.15100139379501343,
-0.09275872260332108,
0.07423093914985657,
0.1264987289905548,
0.15020155906677246,
0.042416736483573914,
0.1458519697189331,
-0.04969090223312378,
-0.2825383245944977,
0.04574279859662056,
0.03541945293545723,
-0.011976231820881367,
0.1270800232887268,
0.08406341820955276,
-0.12452585995197296,
0.05826559662818909,
0.034275297075510025,
-0.15899118781089783,
-0.003669236321002245,
-0.004683241248130798,
-0.10435055941343307,
0.12317958474159241,
0.0064573404379189014,
0.07069429755210876,
0.0483207069337368,
0.06600438058376312,
-0.21849775314331055,
0.006719421129673719,
0.04425959661602974,
0.02855605073273182,
0.07425720989704132,
0.05765978619456291,
-0.028049888089299202,
0.10288064926862717,
-0.07449200749397278,
0.08023011684417725,
0.03749969229102135,
-0.10554177314043045,
-0.29212671518325806,
-0.08603860437870026,
0.04690544679760933,
0.06829395145177841,
0.08848930150270462,
-0.011906549334526062,
0.14405378699302673,
-0.05435881018638611,
0.11068060249090195,
0.2799336314201355,
-0.3132377862930298,
-0.045146841555833817,
-0.03918004035949707,
0.05645136535167694,
0.06091735512018204,
-0.09987223893404007,
-0.01784549094736576,
0.015179495327174664,
0.04465949907898903,
0.13795247673988342,
-0.015828529372811317,
-0.060198623687028885,
-0.0067263380624353886,
-0.14862552285194397,
-0.06026579439640045,
0.11518867313861847,
0.022374121472239494,
-0.03954322636127472,
-0.09777384996414185,
-0.055425889790058136,
-0.21335269510746002,
-0.06749007105827332,
-0.016157276928424835,
0.04333611950278282,
-0.042394332587718964,
-0.10477558523416519,
-0.012277049943804741,
-0.0672234520316124,
-0.07472497969865799,
-0.04086429998278618,
0.1891331970691681,
0.0566757395863533,
-0.0011921821860596538,
-0.03852369263768196,
0.07690025866031647,
-0.021689042448997498,
-0.138247549533844,
-0.02397570200264454,
0.03700451925396919,
-0.020956119522452354,
-0.015290474519133568,
-0.042231518775224686,
-0.0581996776163578,
0.021151943132281303,
0.16163335740566254,
-0.10255303233861923,
0.09620499610900879,
-0.02057516761124134,
0.03970718011260033,
-0.10276912152767181,
0.20789474248886108,
-0.0418180376291275,
0.017576782032847404,
-0.009529013186693192,
0.05598139017820358,
0.02954266034066677,
-0.026170557364821434,
-0.09537056088447571,
0.030951932072639465,
0.12193423509597778,
0.046246014535427094,
-0.048178110271692276,
0.06492531299591064,
-0.034047674387693405,
-0.009892637841403484,
0.002084632171317935,
-0.11154741048812866,
0.036412544548511505,
0.019652148708701134,
-0.06569615751504898,
0.0035736383870244026,
0.014341851696372032,
0.0072896224446594715,
-0.05436139926314354,
0.08275589346885681,
-0.061672989279031754,
0.03320573642849922,
-0.05781775340437889,
-0.12572386860847473,
0.02513972669839859,
-0.11487725377082825,
-0.0033150711096823215,
-0.099916011095047,
-0.10138573497533798,
-0.011766526848077774,
0.03755359351634979,
-0.03871326148509979,
-0.026000158861279488,
-0.07775180041790009,
-0.09040243178606033,
0.045780859887599945,
-0.03436008468270302,
0.07266891747713089,
-0.07448053359985352,
0.09425774216651917,
0.03419557586312294,
0.08770892769098282,
-0.01659530960023403,
0.060401320457458496,
-0.07115887105464935,
0.026651468127965927,
-0.2003585249185562,
0.07499521225690842,
-0.0882958397269249,
0.05715619772672653,
-0.12451942265033722,
-0.1148756593465805,
0.022022083401679993,
-0.007351801265031099,
0.09911179542541504,
0.09700754284858704,
-0.17146143317222595,
-0.08826595544815063,
0.20687226951122284,
-0.08204206824302673,
-0.083707295358181,
0.12498222291469574,
-0.025076063349843025,
-0.00022820691810920835,
0.05553898587822914,
0.2581619620323181,
0.04523538798093796,
-0.12530574202537537,
0.008143801242113113,
-0.04025382921099663,
0.04302043840289116,
-0.0359862744808197,
0.058715540915727615,
-0.02759694680571556,
0.0676233172416687,
0.018036799505352974,
-0.004287241958081722,
0.03732522204518318,
-0.0872926339507103,
-0.07719556987285614,
-0.04409119114279747,
-0.07841350883245468,
0.029108479619026184,
0.03302030637860298,
0.06443256884813309,
-0.11678167432546616,
-0.10780417919158936,
0.03901488706469536,
0.0813325047492981,
-0.1034737378358841,
0.0716647133231163,
-0.1204025000333786,
0.08364253491163254,
-0.014879105612635612,
-0.005070221610367298,
-0.19012872874736786,
0.03600987419486046,
0.038029711693525314,
-0.028291866183280945,
0.04005897045135498,
-0.0645090788602829,
0.07783611118793488,
0.04584876075387001,
-0.026543641462922096,
-0.04625583812594414,
-0.009465120732784271,
0.010228021070361137,
-0.08988085389137268,
-0.20652009546756744,
-0.03812091052532196,
-0.03793153166770935,
0.07917838543653488,
-0.1385466307401657,
0.03419172018766403,
0.0765547826886177,
0.09213785827159882,
0.03232195973396301,
-0.03142789751291275,
-0.0016957769403234124,
0.09017588943243027,
-0.020771650597453117,
-0.06446697562932968,
0.05829201638698578,
0.020398985594511032,
-0.0865570455789566,
0.03818977624177933,
-0.14922857284545898,
0.12808121740818024,
0.14709283411502838,
-0.015159038826823235,
-0.06684043258428574,
0.00010474542796146125,
-0.04736353084445,
-0.03516967594623566,
-0.003827969077974558,
0.03290269523859024,
0.21538689732551575,
0.013531610369682312,
0.14365656673908234,
-0.08930571377277374,
-0.0421011783182621,
0.04978974536061287,
-0.021473588421940804,
-0.006120389327406883,
0.11701664328575134,
0.045085448771715164,
-0.054624710232019424,
0.11850684881210327,
0.09027540683746338,
-0.07986439764499664,
0.12190937250852585,
-0.06021132320165634,
-0.07451378554105759,
-0.020309461280703545,
0.004928208887577057,
0.023886194452643394,
0.09902336448431015,
-0.16320304572582245,
-0.039612188935279846,
0.025918470695614815,
0.025259938091039658,
0.020627159625291824,
-0.20869190990924835,
0.014049693010747433,
0.028607629239559174,
-0.08579307049512863,
-0.04328200966119766,
0.002465304220095277,
0.012892539612948895,
0.0943845584988594,
0.012641347013413906,
-0.09374843537807465,
0.011171546764671803,
0.004129425622522831,
-0.07322510331869125,
0.17603977024555206,
-0.11666613072156906,
-0.17625857889652252,
-0.10573778301477432,
-0.09282252192497253,
-0.039468951523303986,
-0.0027053970843553543,
0.08880801498889923,
-0.09253130108118057,
-0.039298634976148605,
-0.08366912603378296,
-0.016318781301379204,
-0.026619402691721916,
0.04205513745546341,
0.031230339780449867,
-0.011865105479955673,
0.06496904045343399,
-0.1169544905424118,
-0.021483587101101875,
-0.04010646045207977,
-0.0017652381211519241,
0.05449385568499565,
0.037149347364902496,
0.10860037058591843,
0.1582314521074295,
-0.010777192190289497,
0.0500609427690506,
-0.045641567558050156,
0.1887023150920868,
-0.07469738274812698,
-0.03676736727356911,
0.11117152869701385,
-0.005873518995940685,
0.06863761693239212,
0.11719900369644165,
0.04854678362607956,
-0.09790899604558945,
-0.012829958461225033,
0.0037898255977779627,
-0.04597606509923935,
-0.21459706127643585,
-0.03515801206231117,
-0.04487563669681549,
-0.002147891791537404,
0.10576086491346359,
0.041067615151405334,
0.0373961441218853,
0.021660050377249718,
0.03248962014913559,
0.0061063640750944614,
0.002012399723753333,
0.09638699889183044,
0.130077064037323,
0.040140215307474136,
0.13330475986003876,
-0.03767659142613411,
-0.03711308538913727,
0.030172957107424736,
0.005699885077774525,
0.23065103590488434,
0.01923581399023533,
0.19079194962978363,
0.05599447712302208,
0.17562851309776306,
0.04188474267721176,
0.0665760338306427,
-0.0023281234316527843,
-0.011439934372901917,
0.01105701457709074,
-0.05235996097326279,
-0.0396500900387764,
0.023559488356113434,
0.023404523730278015,
0.010008291341364384,
-0.11367009580135345,
-0.011988941580057144,
0.04610157385468483,
0.3516078591346741,
0.027411343529820442,
-0.33806726336479187,
-0.09082548320293427,
-0.012200694531202316,
-0.08528269082307816,
-0.031141338869929314,
0.045277222990989685,
0.08848176151514053,
-0.08038756996393204,
0.06360291689634323,
-0.06236361712217331,
0.09022822231054306,
-0.06412261724472046,
0.03402787074446678,
0.03767089545726776,
0.07197225093841553,
0.004078727215528488,
0.03339843451976776,
-0.2922405004501343,
0.28097084164619446,
0.005429231096059084,
0.07737204432487488,
-0.06107424572110176,
0.008163461461663246,
0.025686483830213547,
0.018551770597696304,
0.08681221306324005,
-0.02570568211376667,
-0.12028193473815918,
-0.1752546727657318,
-0.0929955393075943,
0.01137358695268631,
0.12800903618335724,
0.014290926977992058,
0.11025993525981903,
-0.011076635681092739,
-0.016653049737215042,
0.04951779171824455,
-0.09612446278333664,
-0.06526286154985428,
-0.09221074730157852,
0.011428373865783215,
0.08238303661346436,
0.033630140125751495,
-0.07283381372690201,
-0.10337041318416595,
-0.08797062933444977,
0.149347722530365,
-0.05281282961368561,
-0.04277365654706955,
-0.11878708004951477,
0.008892927318811417,
0.10931842029094696,
-0.07885841280221939,
0.06090939790010452,
0.009357727132737637,
0.10454501956701279,
0.01170405000448227,
-0.06780713051557541,
0.11964511126279831,
-0.06433307379484177,
-0.16713593900203705,
-0.029247360303997993,
0.14410120248794556,
0.029984796419739723,
0.06048277020454407,
-0.008046498522162437,
0.0383547767996788,
-0.022522808983922005,
-0.077153280377388,
0.04115656390786171,
0.02626982517540455,
0.04411570727825165,
-0.013693139888346195,
-0.019618984311819077,
-0.006506140809506178,
-0.09045520424842834,
-0.01831859163939953,
0.20558883249759674,
0.24349291622638702,
-0.09658609330654144,
0.09254779666662216,
0.07048435509204865,
-0.04215268790721893,
-0.17233753204345703,
-0.003609598148614168,
0.06548202782869339,
0.0003060584713239223,
-0.025631308555603027,
-0.1945110559463501,
0.024103490635752678,
0.07004061341285706,
-0.021416759118437767,
0.08301912248134613,
-0.3171079754829407,
-0.14080950617790222,
0.13653235137462616,
0.11388445645570755,
0.06106197088956833,
-0.14613021910190582,
-0.05506385862827301,
-0.010211565531790257,
-0.1025380790233612,
0.09402883052825928,
-0.07439092546701431,
0.13583993911743164,
-0.024395085871219635,
0.09055118262767792,
0.011085258796811104,
-0.05827326700091362,
0.10640087723731995,
0.011888805776834488,
0.06008514389395714,
-0.04568540304899216,
0.01824812777340412,
0.04907729849219322,
-0.0632019191980362,
0.054474398493766785,
-0.07996165007352829,
0.02870166301727295,
-0.08069387078285217,
-0.03259076178073883,
-0.08501128107309341,
0.0143051752820611,
-0.009704957716166973,
-0.03350866585969925,
-0.03708186000585556,
0.0016160530503839254,
0.06174768507480621,
-0.010522176511585712,
0.1545044183731079,
-0.027125265449285507,
0.12636709213256836,
0.1635395884513855,
0.1008370965719223,
-0.10466770827770233,
-0.07610317319631577,
0.006215882487595081,
-0.034340038895606995,
0.05555565655231476,
-0.1166120246052742,
0.03718842566013336,
0.13585075736045837,
0.0314721018075943,
0.12213027477264404,
0.06990715116262436,
-0.06520267575979233,
0.03371967375278473,
0.04193675145506859,
-0.13790380954742432,
-0.12723968923091888,
0.013756118714809418,
0.023448465391993523,
-0.07200626283884048,
0.0727224200963974,
0.11524808406829834,
-0.05519428849220276,
-0.013773572631180286,
-0.0017679247539490461,
0.013635697774589062,
-0.040404267609119415,
0.19558584690093994,
0.036822110414505005,
0.061562422662973404,
-0.12449406087398529,
0.08033376932144165,
0.03864164650440216,
-0.1335592269897461,
0.060989879071712494,
0.10572937875986099,
-0.09525275230407715,
-0.028561105951666832,
0.02777741104364395,
0.11242704838514328,
-0.02837483584880829,
-0.0740433931350708,
-0.14249736070632935,
-0.14226004481315613,
0.10895731300115585,
0.2047395557165146,
0.05631996691226959,
0.016582321375608444,
-0.059292059391736984,
0.01703932322561741,
-0.11865764111280441,
0.069403737783432,
0.04012617841362953,
0.06058439984917641,
-0.12849301099777222,
0.14525172114372253,
0.017378974705934525,
0.039774760603904724,
-0.015068387612700462,
-0.01107021514326334,
-0.11188119649887085,
0.03961029648780823,
-0.12852124869823456,
0.0053679440170526505,
-0.06555632501840591,
0.0008056419319473207,
0.003699701512232423,
-0.050238218158483505,
-0.06420663744211197,
0.0350005067884922,
-0.11980230361223221,
-0.023233849555253983,
0.0015139579772949219,
0.036535680294036865,
-0.12821324169635773,
-0.009511788375675678,
0.015172506682574749,
-0.09357403218746185,
0.09747523069381714,
0.08688071370124817,
-0.03260219842195511,
0.05046198144555092,
-0.06034085154533386,
-0.02628629468381405,
0.07778842747211456,
-0.0063842604868113995,
0.051108378916978836,
-0.1306200921535492,
-0.019470445811748505,
0.011326364241540432,
0.03455924615263939,
0.02401355654001236,
0.11314746737480164,
-0.11603404581546783,
0.0008960484410636127,
-0.027594830840826035,
-0.051907241344451904,
-0.0684390515089035,
0.05053671449422836,
0.10958374291658401,
0.02757941372692585,
0.1637764722108841,
-0.09339061379432678,
0.027703197672963142,
-0.16620126366615295,
0.0064531732350587845,
-0.01522138249129057,
-0.12184406816959381,
-0.05091356486082077,
-0.03190355375409126,
0.07811766862869263,
-0.06345972418785095,
0.13078901171684265,
-0.030455099418759346,
0.025080401450395584,
0.03731410950422287,
-0.07726695388555527,
-0.05333266034722328,
0.039647504687309265,
0.20654624700546265,
0.03931599482893944,
-0.04324667528271675,
0.07519874721765518,
0.020934900268912315,
0.08080600202083588,
0.1295783370733261,
0.17298473417758942,
0.1607881486415863,
0.06347686797380447,
0.11682124435901642,
0.05453762784600258,
-0.05320059880614281,
-0.17402292788028717,
0.09230802953243256,
-0.060276709496974945,
0.1305769383907318,
-0.014341930858790874,
0.24112437665462494,
0.12125487625598907,
-0.15338638424873352,
0.06586644798517227,
-0.01937364786863327,
-0.08936231583356857,
-0.11579914391040802,
-0.06332032382488251,
-0.0869855210185051,
-0.17657700181007385,
0.00920387078076601,
-0.10204344987869263,
0.06307265162467957,
0.047155868262052536,
0.037556037306785583,
0.017014650627970695,
0.13752016425132751,
0.016575932502746582,
0.0028347221668809652,
0.09153669327497482,
-0.003392178798094392,
-0.05593612790107727,
-0.07296172529459,
-0.08541002869606018,
0.034677211195230484,
-0.013555123470723629,
0.05818389728665352,
-0.004046916961669922,
-0.06911953538656235,
0.04702029377222061,
-0.03861567750573158,
-0.0964185819029808,
0.023027431219816208,
0.021672777831554413,
0.06978718936443329,
0.05003747344017029,
0.0341593436896801,
-0.04138512536883354,
-0.0023753235582262278,
0.1956040859222412,
-0.09438656270503998,
-0.09315497428178787,
-0.10909578204154968,
0.25362738966941833,
0.039139874279499054,
-0.015677686780691147,
0.02148428186774254,
-0.06038916856050491,
-0.03152010217308998,
0.21176022291183472,
0.17216283082962036,
-0.011390690691769123,
0.0038215206004679203,
-0.013853519223630428,
-0.0063662175089120865,
-0.037059634923934937,
0.08023175597190857,
0.14704789221286774,
0.06169869378209114,
-0.06345091015100479,
-0.052421700209379196,
-0.05089200288057327,
-0.03470566123723984,
-0.06613753736019135,
0.07537133246660233,
0.006660172250121832,
-0.02515644207596779,
-0.04525953531265259,
0.06324705481529236,
-0.09421046078205109,
-0.08264342695474625,
0.025249235332012177,
-0.19519402086734772,
-0.15011774003505707,
0.00714054936543107,
0.07110968232154846,
0.011370549909770489,
0.03471324220299721,
0.003075638320297003,
-0.008666581474244595,
0.08035959303379059,
-0.002129684668034315,
-0.08053214848041534,
-0.06623467057943344,
0.08440342545509338,
-0.13297195732593536,
0.16674721240997314,
-0.04231426492333412,
0.048181675374507904,
0.12330064922571182,
0.08834908902645111,
-0.08060440421104431,
0.08698412030935287,
0.04214141517877579,
-0.10697236657142639,
0.021693486720323563,
0.1540914922952652,
-0.033257052302360535,
0.09497203677892685,
0.030675923451781273,
-0.11471245437860489,
0.014496706426143646,
-0.09043897688388824,
-0.03821524605154991,
-0.04121850058436394,
-0.050358083099126816,
-0.04437951371073723,
0.10982204973697662,
0.16341567039489746,
-0.043647341430187225,
0.004043710883706808,
-0.05261571705341339,
0.011974305845797062,
0.04789311811327934,
-0.0006571264821104705,
-0.061769306659698486,
-0.2793271541595459,
0.011486247181892395,
0.03786472603678703,
0.002746510785073042,
-0.25752827525138855,
-0.09663747996091843,
0.01299266703426838,
-0.04285489022731781,
-0.08771485090255737,
0.08573777973651886,
0.07471591234207153,
0.04653165489435196,
-0.05240245908498764,
-0.05806141346693039,
-0.035808440297842026,
0.18863575160503387,
-0.17564551532268524,
-0.06003304198384285
] |
null | null | null |
Hi, this is Taiwan_House_Prediction.
|
{}
| null |
huang0624/Taiwan_House_Prediction
|
[
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#region-us
|
Hi, this is Taiwan_House_Prediction.
|
[] |
[
"TAGS\n#region-us \n"
] |
[
6
] |
[
"passage: TAGS\n#region-us \n"
] |
[
0.024608636274933815,
-0.026205500587821007,
-0.009666500613093376,
-0.10395516455173492,
0.08638657629489899,
0.059816278517246246,
0.01882290467619896,
0.020661840215325356,
0.23975107073783875,
-0.005599027033895254,
0.1219947561621666,
0.0015615287702530622,
-0.037353623658418655,
0.03733762726187706,
-0.0035912662278860807,
-0.17583473026752472,
0.03876631706953049,
-0.018274923786520958,
0.01843859627842903,
0.026470553129911423,
-0.07776834815740585,
-0.07564429938793182,
0.015296397730708122,
-0.10247814655303955,
-0.083692267537117,
0.11002834886312485,
0.031466204673051834,
-0.019670886918902397,
0.10779199749231339,
-0.04243955761194229,
0.18699054419994354,
-0.011512263678014278,
-0.11213519424200058,
-0.2536850869655609,
0.021806683391332626,
-0.01765260472893715,
-0.08747660368680954,
0.01506110467016697,
0.0665089413523674,
-0.09014441072940826,
-0.0588928684592247,
0.0795099288225174,
-0.01132340170443058,
0.04246443510055542,
-0.27593839168548584,
-0.12684126198291779,
-0.05297930911183357,
-0.1421966552734375,
0.08651168644428253,
0.04035491496324539,
0.008764253929257393,
0.15506891906261444,
-0.20897391438484192,
0.004104613792151213,
0.08255259692668915,
-0.2538507878780365,
0.05591634660959244,
0.17671173810958862,
0.03623908758163452,
0.18037272989749908,
0.0060391901060938835,
0.11029672622680664,
0.0716743916273117,
-0.024263937026262283,
-0.17590197920799255,
-0.08127854019403458,
-0.04696211963891983,
0.16642488539218903,
-0.06727185100317001,
-0.14248386025428772,
0.34701237082481384,
0.00015008423360995948,
0.009657775051891804,
0.16921205818653107,
-0.059524230659008026,
-0.09972117841243744,
0.07259953022003174,
0.016484731808304787,
0.018492350354790688,
0.1471305936574936,
0.16307872533798218,
-0.0458691343665123,
-0.13837823271751404,
-0.018630273640155792,
-0.22798998653888702,
0.17510560154914856,
-0.03248048573732376,
0.13137903809547424,
-0.27447956800460815,
0.01684025302529335,
-0.2570667266845703,
0.0032130838371813297,
0.04178816080093384,
-0.06004921346902847,
-0.0226522795855999,
-0.013265985064208508,
-0.08018817007541656,
0.004899587947875261,
0.06192673370242119,
0.1266920566558838,
-0.06128726154565811,
0.06128238886594772,
-0.09319206327199936,
0.141696035861969,
0.07166698575019836,
0.07868369668722153,
0.13037432730197906,
0.041205424815416336,
-0.07187089323997498,
-0.21872246265411377,
-0.0026476888451725245,
-0.06275863200426102,
-0.09502086788415909,
-0.0020165652967989445,
-0.11606067419052124,
0.17244569957256317,
-0.030802514404058456,
-0.09825427830219269,
-0.11208184063434601,
0.09148659557104111,
-0.032992321997880936,
-0.03437839448451996,
-0.03552987426519394,
-0.020977836102247238,
0.019381176680326462,
0.04704452306032181,
-0.1548958420753479,
-0.005131472367793322,
0.07039852440357208,
0.11502562463283539,
-0.1346137970685959,
-0.003783059772104025,
-0.07908964157104492,
0.03039063885807991,
0.07654735445976257,
-0.16510222852230072,
0.03158547356724739,
-0.1124754324555397,
-0.07531405985355377,
0.002912673633545637,
-0.015710093080997467,
-0.016202643513679504,
0.166526660323143,
-0.0020451415330171585,
0.0714716836810112,
-0.026345307007431984,
-0.05890209600329399,
-0.11243434250354767,
-0.08489254862070084,
0.05390460044145584,
0.03670717030763626,
0.03266148269176483,
-0.2193479984998703,
0.014805203303694725,
-0.12762966752052307,
0.1360815018415451,
-0.10566820204257965,
-0.04705966264009476,
-0.022842247039079666,
0.20562705397605896,
0.037286072969436646,
0.08762791007757187,
-0.22171171009540558,
0.039756543934345245,
-0.05404696613550186,
0.18480908870697021,
-0.1502426266670227,
-0.0799463614821434,
0.20813211798667908,
-0.07964949309825897,
-0.10115210711956024,
0.021235812455415726,
0.020391687750816345,
0.026287272572517395,
0.0766737088561058,
0.4564172327518463,
-0.09766800701618195,
-0.09146861732006073,
0.10178250074386597,
0.17055274546146393,
-0.12427149713039398,
-0.1827561855316162,
0.06446871906518936,
-0.16666454076766968,
-0.1973118633031845,
0.0018917324487119913,
0.09222044050693512,
0.038269978016614914,
-0.07875611633062363,
-0.020746968686580658,
0.06325206160545349,
-0.0007678253459744155,
0.09095914661884308,
0.03755716234445572,
0.09034032374620438,
-0.08716782182455063,
0.11115926504135132,
-0.05017651244997978,
0.004037132486701012,
0.1343354731798172,
0.027325427159667015,
-0.03223329409956932,
0.08694463223218918,
-0.0485352948307991,
0.05295134335756302,
-0.1662379503250122,
-0.15068690478801727,
0.03398871049284935,
0.06283251196146011,
0.03186952322721481,
0.1280253529548645,
0.08141885697841644,
-0.10732853412628174,
0.022690722718834877,
-0.004228927195072174,
0.058398615568876266,
0.03891623765230179,
0.006107209715992212,
0.008764320984482765,
0.0961301177740097,
-0.10607069730758667,
-0.13589619100093842,
-0.07336436957120895,
-0.014715781435370445,
0.14371353387832642,
-0.0302802175283432,
0.07690227776765823,
-0.004240254405885935,
0.00013200697139836848,
0.06930823624134064,
0.08137880265712738,
0.016412746161222458,
0.08971183747053146,
-0.05237193778157234,
-0.05160155147314072,
0.10863113403320312,
-0.13533565402030945,
0.17837053537368774,
0.14053137600421906,
-0.20532016456127167,
0.029453208670020103,
-0.06838275492191315,
0.03670361638069153,
-0.008162540383636951,
0.0975119024515152,
-0.08272241055965424,
-0.02106042578816414,
0.013134466484189034,
0.0052274600602686405,
-0.013007243163883686,
0.017682146281003952,
-0.07295988500118256,
-0.07787393033504486,
-0.10233919322490692,
0.08436838537454605,
0.11562882363796234,
-0.10282530635595322,
0.14214380085468292,
0.4384984076023102,
0.11495281755924225,
0.21582984924316406,
-0.09581480920314789,
-0.0412987545132637,
0.007486371789127588,
0.0001535322517156601,
-0.04476691037416458,
0.08031861484050751,
-0.15973517298698425,
-0.038901735097169876,
0.027348900213837624,
0.07128690183162689,
0.11475157737731934,
-0.14959022402763367,
-0.09639324247837067,
-0.00793045200407505,
0.0022841424215584993,
-0.1249532699584961,
0.023905446752905846,
-0.03974650055170059,
0.04015624523162842,
0.07232289016246796,
-0.021535737439990044,
0.13939237594604492,
-0.04166141897439957,
-0.0639561116695404,
0.07585346698760986,
-0.2017085999250412,
-0.23179671168327332,
-0.12309670448303223,
-0.14680525660514832,
0.04366797208786011,
0.05154111236333847,
0.01726446859538555,
-0.17635835707187653,
-0.015074856579303741,
0.07706750929355621,
0.07820965349674225,
-0.20886357128620148,
-0.022814949974417686,
-0.004290030337870121,
0.0895976573228836,
-0.10227091610431671,
-0.0017130117630586028,
-0.04419664293527603,
-0.10150232166051865,
0.0017003051470965147,
0.07279510796070099,
-0.137485533952713,
0.13807645440101624,
0.21589438617229462,
0.07225540280342102,
0.07359948754310608,
-0.019093448296189308,
0.09936179965734482,
-0.10856141895055771,
-0.16549113392829895,
0.08348225057125092,
-0.06234746053814888,
0.047262318432331085,
0.17534415423870087,
0.03307317942380905,
-0.13904969394207,
-0.015682822093367577,
-0.0402069091796875,
-0.15603256225585938,
-0.238995760679245,
-0.09178274869918823,
-0.1182505264878273,
0.16442428529262543,
0.0009358620154671371,
0.06651917099952698,
0.08258313685655594,
-0.022042419761419296,
0.16447891294956207,
-0.07379321753978729,
-0.07578866183757782,
-0.006978808436542749,
0.12375060468912125,
-0.056660156697034836,
-0.03080669604241848,
-0.10566964000463486,
-0.008295975625514984,
0.1151021271944046,
0.15304014086723328,
0.12214863300323486,
0.2957419455051422,
0.08268889784812927,
0.026645636186003685,
0.08958091586828232,
0.17622539401054382,
0.09495089203119278,
0.07838419824838638,
-0.045413073152303696,
-0.014814783819019794,
0.014317171648144722,
-0.04022889584302902,
0.010141594335436821,
0.14683100581169128,
-0.2679629921913147,
-0.006678564939647913,
-0.2710230350494385,
0.0965198427438736,
-0.10913380235433578,
0.11837165057659149,
-0.01015760749578476,
0.10194015502929688,
0.11082887649536133,
0.03233652561903,
-0.03858073800802231,
0.16613617539405823,
0.08450309932231903,
-0.11277695000171661,
0.001758623169735074,
0.03737903758883476,
0.09715615212917328,
-0.02818971499800682,
0.12721189856529236,
-0.11048974841833115,
-0.1464834064245224,
0.013753619976341724,
0.07152791321277618,
-0.15373679995536804,
0.3138748109340668,
0.012069208547472954,
-0.13481520116329193,
-0.01481647603213787,
-0.09957809001207352,
-0.006440147757530212,
0.1254177987575531,
0.09333524852991104,
0.07935678958892822,
-0.2185502052307129,
-0.13339371979236603,
0.05872276425361633,
-0.00575496768578887,
0.22408108413219452,
-0.034034017473459244,
-0.11356475204229355,
-0.027013886719942093,
0.04241163283586502,
-0.06043251231312752,
0.08524788916110992,
0.023536119610071182,
-0.08113526552915573,
-0.032957352697849274,
0.05323701351881027,
0.012368366122245789,
0.00524376705288887,
0.09360801428556442,
0.020107939839363098,
-0.0009265501867048442,
0.01785753294825554,
0.047885000705718994,
-0.0675911232829094,
-0.1984109878540039,
0.09357594698667526,
-0.05215044692158699,
0.0015536568826064467,
-0.08013670891523361,
-0.15122665464878082,
-0.08837161958217621,
-0.16009655594825745,
0.12540200352668762,
-0.034406669437885284,
0.12700119614601135,
-0.06619787961244583,
0.17341409623622894,
-0.07871770113706589,
0.04481020197272301,
-0.047349292784929276,
0.050332702696323395,
-0.007268077693879604,
-0.07756082713603973,
0.16585899889469147,
-0.15564003586769104,
0.01809087023139,
0.19572502374649048,
-0.018915493041276932,
0.07177707552909851,
0.021322092041373253,
-0.0636206790804863,
0.23147478699684143,
0.3014698624610901,
0.008138049393892288,
0.1665448248386383,
0.3018903136253357,
-0.07466315478086472,
-0.2642788887023926,
-0.05505012720823288,
-0.2841376066207886,
-0.05371501296758652,
0.10716094076633453,
-0.22523896396160126,
0.06986407935619354,
0.14383509755134583,
-0.06471995264291763,
0.30228954553604126,
-0.21825523674488068,
0.012589273042976856,
0.15434536337852478,
-0.08868814259767532,
0.5515313148498535,
-0.1133413165807724,
-0.17677772045135498,
-0.008122089318931103,
-0.08741296827793121,
0.10602109134197235,
-0.0340677872300148,
0.06877441704273224,
0.013465235009789467,
0.04797380417585373,
0.048932258039712906,
-0.03111894056200981,
0.22701001167297363,
0.008710170164704323,
0.09015397727489471,
-0.07378865778446198,
-0.18624304234981537,
0.11639340221881866,
-0.04359482601284981,
-0.08891059458255768,
0.0849778801202774,
-0.05942516401410103,
-0.11078983545303345,
0.04663389176130295,
-0.07950539886951447,
-0.024862350896000862,
0.08423490077257156,
-0.04678233340382576,
-0.042606171220541,
-0.008054176345467567,
-0.1618063747882843,
-0.0002289071271661669,
0.31360217928886414,
-0.07096036523580551,
0.16695955395698547,
0.03677211329340935,
0.00038613268407061696,
-0.11027684062719345,
0.030288029462099075,
-0.05203165486454964,
-0.021576624363660812,
0.09578979015350342,
-0.11096979677677155,
0.03204701095819473,
0.14160704612731934,
-0.04864364117383957,
0.05846960097551346,
0.09256096184253693,
-0.0849417969584465,
0.007583672646433115,
0.17753590643405914,
-0.17537221312522888,
-0.1273445188999176,
-0.006135711446404457,
-0.09862716495990753,
0.14055661857128143,
0.04394126310944557,
0.05191568285226822,
0.16669964790344238,
0.03967129811644554,
-0.029474308714270592,
-0.02817419543862343,
-0.1153380498290062,
-0.0201893113553524,
0.040153320878744125,
0.00045633706031367183,
-0.08791285753250122,
0.2262638509273529,
0.06409153342247009,
-0.1328488290309906,
-0.051157206296920776,
0.2161225974559784,
-0.06805316358804703,
-0.04911920800805092,
-0.223562553524971,
0.10752306133508682,
-0.07112517952919006,
-0.0965060144662857,
0.05453834682703018,
-0.02270081453025341,
0.005106312222778797,
0.181985542178154,
0.03941008821129799,
0.11070270836353302,
0.03738937899470329,
-0.02448922023177147,
0.15798696875572205,
-0.142850860953331,
-0.14191335439682007,
-0.025354057550430298,
-0.08757315576076508,
-0.13844476640224457,
-0.026804137974977493,
0.1617041826248169,
-0.09177309274673462,
-0.14772607386112213,
-0.2621181011199951,
0.10968475043773651,
-0.16432365775108337,
-0.10192688554525375,
-0.03469514101743698,
-0.08968492597341537,
0.0696166530251503,
0.030301768332719803,
-0.03093348816037178,
-0.06706760823726654,
-0.18593791127204895,
0.0816768929362297,
0.06349513679742813,
0.045533183962106705,
-0.017847947776317596,
0.0067379772663116455,
0.1720137596130371,
0.025955144315958023,
0.10040043294429779,
0.16762186586856842,
0.011397695168852806,
0.2246655523777008,
-0.1671202927827835,
-0.11496317386627197,
0.1336962729692459,
-0.026543032377958298,
0.06762003898620605,
0.16792191565036774,
-0.0772583931684494,
0.015526676550507545,
-0.028136352077126503,
0.07066910713911057,
-0.11003983020782471,
-0.105624258518219,
0.007937257178127766,
0.02567129209637642,
-0.2755882740020752,
-0.005599735304713249,
-0.19717298448085785,
0.14788752794265747,
0.02579621411859989,
0.03297143429517746,
0.10257530212402344,
0.10404334217309952,
0.08312062919139862,
-0.0017710148822516203,
0.03226327523589134,
-0.1176818460226059,
0.02753005363047123,
-0.059239376336336136,
-0.020663779228925705,
0.017624232918024063,
0.36952024698257446,
-0.03603357449173927,
-0.046802736818790436,
0.003710439894348383,
0.1307835876941681,
-0.02139742486178875,
0.017395347356796265,
0.13209912180900574,
0.12607666850090027,
-0.08595693111419678,
-0.1504845917224884,
0.04888554662466049,
-0.04565655067563057,
-0.02836887165904045,
0.1464131623506546,
0.05905961990356445,
0.1050296202301979,
0.0908031314611435,
-0.014463032595813274,
-0.00318976235575974,
0.012856799177825451,
-0.15486004948616028,
0.06223496049642563,
-0.010558074340224266,
0.012565906159579754,
0.017934376373887062,
0.15238402783870697,
-0.005540105979889631,
0.07739730179309845,
-0.09889880567789078,
0.004208535887300968,
-0.13498884439468384,
-0.07913459837436676,
0.03617347031831741,
-0.13393273949623108,
0.04141177982091904,
-0.01871878281235695,
0.029611799865961075,
0.30386561155319214,
0.02558239921927452,
-0.020639164373278618,
0.12512871623039246,
-0.1214587539434433,
-0.12050267308950424,
-0.001594188273884356,
-0.029960084706544876,
0.0791488066315651,
-0.02633434161543846,
-0.0997740775346756,
-0.1001306027173996,
-0.15166029334068298,
-0.09759195148944855,
0.05182836204767227,
-0.04993441700935364,
-0.059362251311540604,
-0.17634081840515137,
-0.05707859992980957,
-0.05147340148687363,
0.14025864005088806,
-0.12263951450586319,
0.15159130096435547,
-0.014490418136119843,
0.004084470681846142,
0.04405883327126503,
0.1950942426919937,
-0.03644494712352753,
0.08714226633310318,
0.0154351145029068,
0.1522706001996994,
-0.05119588226079941,
0.14720745384693146,
-0.10931728035211563,
-0.04014137014746666,
-0.06710435450077057,
0.21513493359088898,
0.25630924105644226,
-0.06136954948306084,
-0.008937356993556023,
-0.012760217301547527,
0.058654606342315674,
0.1073930487036705,
0.16049085557460785,
0.002326392102986574,
0.2802925705909729,
-0.03133585304021835,
0.04815128445625305,
0.02901598811149597,
0.013607407920062542,
-0.06336209923028946,
0.03397751972079277,
0.07539387792348862,
-0.035039983689785004,
-0.1412304788827896,
0.15837742388248444,
-0.21980468928813934,
0.18157227337360382,
0.11640069633722305,
-0.19996967911720276,
-0.013728445395827293,
-0.04882071167230606,
0.1689416468143463,
-0.0856364443898201,
0.1637246012687683,
-0.0903693437576294,
-0.2108195722103119,
-0.2056000679731369,
0.03867346793413162,
-0.34623071551322937,
-0.254462867975235,
0.10422009229660034,
0.1488201916217804,
0.04015883058309555,
-0.018507536500692368,
-0.019967829808592796,
-0.018367022275924683,
0.04877542704343796,
-0.0067357709631323814,
0.06014643982052803,
0.031397558748722076,
-0.02988368645310402,
-0.24127542972564697,
-0.029804671183228493,
0.023964406922459602,
-0.07093082368373871,
0.07464958727359772,
-0.06874357163906097,
-0.022495782002806664,
0.08059766888618469,
-0.03066304884850979,
0.03298592567443848,
-0.035373736172914505,
-0.16326889395713806,
0.027529051527380943,
0.03900543600320816,
0.036012712866067886,
0.00634160777553916,
0.0008072225609794259,
-0.03455270454287529,
0.0644603744149208,
-0.16716794669628143,
-0.16015739738941193,
0.14140215516090393,
-0.06745140254497528,
0.2779497504234314,
-0.05812826007604599,
-0.0809100940823555,
0.04766704887151718,
-0.03426874056458473,
0.1807648241519928,
-0.07756473124027252,
0.047254521399736404,
0.12766779959201813,
0.011127962730824947,
0.03121316432952881,
-0.3092964291572571,
0.11082969605922699,
-0.000795336440205574,
-0.006093299947679043,
-0.07581598311662674
] |
null | null |
transformers
|
## DynaBERT: Dynamic BERT with Adaptive Width and Depth
* DynaBERT can flexibly adjust its size and latency by selecting an adaptive width and depth, and
its subnetworks achieve performance competitive with other compressed models of similar size.
DynaBERT is trained by first training a width-adaptive BERT and then
allowing both adaptive width and depth, using knowledge distillation.
* This code is modified from the repository developed by Hugging Face: [Transformers v2.1.1](https://github.com/huggingface/transformers/tree/v2.1.1), and is released on [GitHub](https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/DynaBERT).
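
Below is a minimal, hypothetical usage sketch rather than the authors' interface: it assumes the released checkpoint loads with the standard `transformers` `BertModel` class, and it emulates depth adaptation by truncating the encoder to its first k layers. The tokenizer name (`bert-base-uncased`) and the layer count are assumptions, and true width adaptation requires the modified modeling code from the GitHub repository above.

```
import torch
from transformers import BertModel, BertTokenizer

# Assumption: the checkpoint is compatible with the stock BertModel class.
model = BertModel.from_pretrained("huawei-noah/DynaBERT_MNLI")
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")  # assumed vocabulary

# Emulate depth adaptation: keep only the first k transformer layers.
k = 6
model.encoder.layer = model.encoder.layer[:k]
model.config.num_hidden_layers = k

inputs = tokenizer("DynaBERT adapts width and depth.", return_tensors="pt")
with torch.no_grad():
    hidden = model(**inputs).last_hidden_state
print(hidden.shape)  # (1, sequence_length, hidden_size)
```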
### Reference
Lu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.
[DynaBERT: Dynamic BERT with Adaptive Width and Depth](https://arxiv.org/abs/2004.04037).
```
@inproceedings{hou2020dynabert,
title = {DynaBERT: Dynamic BERT with Adaptive Width and Depth},
author = {Lu Hou and Zhiqi Huang and Lifeng Shang and Xin Jiang and Xiao Chen and Qun Liu},
booktitle = {Advances in Neural Information Processing Systems},
year = {2020}
}
```
|
{}
| null |
huawei-noah/DynaBERT_MNLI
|
[
"transformers",
"pytorch",
"jax",
"bert",
"arxiv:2004.04037",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2004.04037"
] |
[] |
TAGS
#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us
|
## DynaBERT: Dynamic BERT with Adaptive Width and Depth
* DynaBERT can flexibly adjust its size and latency by selecting an adaptive width and depth, and
its subnetworks achieve performance competitive with other compressed models of similar size.
DynaBERT is trained by first training a width-adaptive BERT and then
allowing both adaptive width and depth, using knowledge distillation.
* This code is modified from the repository developed by Hugging Face: Transformers v2.1.1, and is released on GitHub.
### Reference
Lu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.
DynaBERT: Dynamic BERT with Adaptive Width and Depth.
|
[
"## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.",
"### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
"TAGS\n#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us \n",
"## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.",
"### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
34,
136,
47
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us \n## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
-0.04640497267246246,
0.062194932252168655,
-0.0024007975589483976,
0.06621027737855911,
0.13853752613067627,
-0.021721696481108665,
0.09933654963970184,
0.0463455431163311,
-0.032217804342508316,
-0.025705797597765923,
0.03086184523999691,
0.029750535264611244,
0.03861381486058235,
0.10609053075313568,
-0.02029876783490181,
-0.2867789566516876,
0.03639873489737511,
0.0957026481628418,
-0.1599423885345459,
0.0070722345262765884,
0.08523627370595932,
-0.11427120119333267,
0.10260650515556335,
0.040353428572416306,
-0.13776299357414246,
0.04648720473051071,
-0.05770221725106239,
-0.03340814262628555,
0.13822318613529205,
0.06742295622825623,
0.19895319640636444,
0.07370071113109589,
0.05249422788619995,
-0.03890644758939743,
0.031619325280189514,
0.02753223292529583,
0.007798473350703716,
0.03959975764155388,
0.04966900497674942,
0.202104851603508,
0.13618986308574677,
-0.033802181482315063,
-0.011621044017374516,
0.0007954379543662071,
-0.015552138909697533,
-0.08973048627376556,
-0.069710373878479,
-0.03801281750202179,
0.07276280224323273,
0.042332086712121964,
0.029340835288167,
0.06477876752614975,
-0.06322271376848221,
0.04206397756934166,
0.1570155769586563,
-0.34002000093460083,
0.018114173784852028,
0.12723305821418762,
0.06801590323448181,
-0.0074868639931082726,
-0.09286614507436752,
-0.0027618298772722483,
0.044369567185640335,
0.03978743031620979,
0.11871882528066635,
-0.10354158282279968,
0.0009015402756631374,
-0.006720948964357376,
-0.1236073300242424,
0.09890078008174896,
0.10532891750335693,
-0.010607123374938965,
-0.054431986063718796,
-0.07447993010282516,
-0.04105202481150627,
0.03848988935351372,
-0.039846718311309814,
-0.13111141324043274,
0.07209114730358124,
0.0038063055835664272,
-0.0691145807504654,
-0.10407252609729767,
-0.04718426242470741,
-0.06903630495071411,
-0.08260516822338104,
0.17413631081581116,
0.048464950174093246,
-0.007318675518035889,
-0.12049170583486557,
0.04290483146905899,
0.07104537636041641,
-0.08886678516864777,
-0.12050144374370575,
-0.08280674368143082,
0.07457096129655838,
-0.043132249265909195,
-0.14055640995502472,
-0.11050230264663696,
0.06348753720521927,
0.013121827505528927,
-0.04219494387507439,
0.019920745864510536,
0.05438508838415146,
-0.03481358289718628,
0.01243880670517683,
0.18262679874897003,
-0.09830140322446823,
0.001163846580311656,
0.091972716152668,
0.007036871742457151,
-0.018687298521399498,
-0.060217902064323425,
-0.12087764590978622,
-0.08382020145654678,
0.07269434630870819,
0.0083420155569911,
-0.09041920304298401,
0.133135125041008,
0.0218292698264122,
-0.08256914466619492,
0.029812345281243324,
-0.0817725881934166,
-0.06022792309522629,
-0.01235879771411419,
-0.05611801892518997,
0.057974305003881454,
0.052870966494083405,
-0.03244524449110031,
-0.03666243702173233,
0.0935652032494545,
-0.11797890067100525,
-0.05904586240649223,
-0.04432210326194763,
-0.09175629913806915,
-0.005146965850144625,
-0.0481896847486496,
0.05099806934595108,
-0.1619361937046051,
-0.03009507618844509,
0.03676380589604378,
-0.03916231915354729,
0.0847431868314743,
-0.02204873599112034,
-0.024049364030361176,
-0.03557829186320305,
-0.040001362562179565,
-0.026247723028063774,
-0.09007466584444046,
0.009831679984927177,
0.08477668464183807,
0.014668683521449566,
0.0321132130920887,
-0.20589688420295715,
0.08646875619888306,
-0.05804482102394104,
0.038303524255752563,
-0.09306348115205765,
0.09058544039726257,
-0.029688101261854172,
0.01658858358860016,
-0.009099104441702366,
-0.05957619845867157,
0.02726774662733078,
0.007929501123726368,
0.08833744376897812,
0.14044223725795746,
-0.11446104198694229,
-0.09034640341997147,
0.11191191524267197,
-0.07956854999065399,
-0.06539987772703171,
0.036691345274448395,
-0.05326138809323311,
-0.064402274787426,
0.11207320541143417,
0.12503032386302948,
0.19133301079273224,
-0.17333711683750153,
-0.040211305022239685,
0.10161904990673065,
-0.0026277604047209024,
-0.06416724622249603,
0.01622854731976986,
0.10280639678239822,
-0.03307482600212097,
0.06815136969089508,
-0.1582157015800476,
0.10361689329147339,
-0.01509149745106697,
-0.05536501482129097,
0.004975876305252314,
-0.10453414916992188,
0.08827631175518036,
0.003398026106879115,
0.07126712799072266,
0.07265561074018478,
0.05378071591258049,
0.1622493416070938,
0.12387096881866455,
-0.07077504694461823,
0.022600747644901276,
-0.10322577506303787,
-0.022321457043290138,
0.07743106037378311,
0.07699903100728989,
-0.09612777084112167,
-0.14096759259700775,
0.04892359673976898,
-0.006638683378696442,
-0.0244440995156765,
0.16660068929195404,
0.06161453202366829,
0.06194682419300079,
-0.028753064572811127,
0.01664140820503235,
-0.14625687897205353,
-0.02631732076406479,
-0.011470520868897438,
-0.03027181327342987,
-0.05517284572124481,
-0.05344060808420181,
0.030407361686229706,
-0.09837261587381363,
-0.012341664172708988,
0.028246574103832245,
0.045115597546100616,
0.07798200100660324,
-0.00845405925065279,
-0.020679239183664322,
0.05588245391845703,
-0.03525558114051819,
-0.04333777725696564,
0.026116106659173965,
0.05513326823711395,
-0.08150036633014679,
0.09329527616500854,
-0.02283664606511593,
0.21133656799793243,
0.12048404663801193,
-0.02482973411679268,
-0.015876973047852516,
-0.06216523423790932,
-0.03387819975614548,
-0.013945956714451313,
0.038541413843631744,
-0.007977079600095749,
0.14389288425445557,
-0.009595573879778385,
0.1475646048784256,
-0.057301491498947144,
0.05172925442457199,
-0.01764683984220028,
0.004343416541814804,
0.02865501120686531,
-0.011354498565196991,
-0.0019943206571042538,
0.05369481071829796,
0.06569938361644745,
0.03976869210600853,
-0.07465734332799911,
0.14677157998085022,
-0.08509872108697891,
-0.055399857461452484,
0.009356173686683178,
0.040719859302043915,
0.01784289814531803,
0.0790150910615921,
-0.029480062425136566,
-0.08612371981143951,
-0.012769460678100586,
-0.01872495748102665,
0.0012342649279162288,
-0.10648355633020401,
-0.03166025131940842,
0.02577160857617855,
-0.03843485936522484,
-0.004281121306121349,
0.011966665275394917,
-0.03339855372905731,
0.05843053013086319,
0.0776495486497879,
-0.06716644763946533,
0.02576976828277111,
-0.0205066055059433,
-0.02622973918914795,
0.1673118621110916,
-0.0801747739315033,
-0.23186632990837097,
-0.07670403271913528,
-0.21580061316490173,
-0.12562604248523712,
0.1009954959154129,
0.006469095125794411,
-0.13905946910381317,
-0.04756101593375206,
0.03159092739224434,
0.15860363841056824,
-0.06701561063528061,
0.05206518620252609,
0.010908239521086216,
0.006474206689745188,
0.03310679644346237,
-0.15169811248779297,
0.010657303035259247,
-0.08146867901086807,
-0.06602973490953445,
0.012529661878943443,
-0.011461487971246243,
0.06577345728874207,
0.07390773296356201,
0.015142041258513927,
-0.051686517894268036,
-0.033998262137174606,
0.16999424993991852,
-0.015814239159226418,
-0.06522590667009354,
0.15283310413360596,
-0.033426735550165176,
0.06997639685869217,
0.007198861334472895,
0.022611819207668304,
-0.10972829908132553,
0.06207912787795067,
0.03166283294558525,
-0.0473036989569664,
-0.10295946896076202,
-0.05805029720067978,
-0.09480045735836029,
0.052229881286621094,
0.07232891023159027,
0.006471594795584679,
0.019037537276744843,
0.048064541071653366,
0.010846782475709915,
0.21173343062400818,
0.00680063059553504,
0.046051230281591415,
0.14817668497562408,
-0.005354925058782101,
0.07016392052173615,
-0.0224855188280344,
-0.14388352632522583,
0.05727493017911911,
0.09432481974363327,
0.24052946269512177,
-0.05926137790083885,
0.06343374401330948,
-0.004285563714802265,
0.14680291712284088,
0.04697496071457863,
0.1168997511267662,
-0.03339948505163193,
-0.010999244637787342,
-0.025174155831336975,
-0.06012147665023804,
-0.056536972522735596,
0.05566153675317764,
0.03465833142399788,
-0.13225157558918,
-0.0715208575129509,
0.036950912326574326,
-0.07349002361297607,
0.2668358087539673,
0.02105046808719635,
-0.16096282005310059,
-0.019500525668263435,
-0.05077173560857773,
-0.019839828833937645,
-0.06561882793903351,
0.06619075685739517,
0.07454272359609604,
-0.06519591808319092,
0.030998192727565765,
-0.05323343724012375,
0.07691895961761475,
-0.11645317822694778,
0.06012483686208725,
-0.07252418249845505,
0.11142729222774506,
0.07009445875883102,
0.006476127542555332,
-0.28353893756866455,
0.038583122193813324,
-0.01533033512532711,
0.015737924724817276,
-0.028219474479556084,
0.030294140800833702,
0.11735255271196365,
0.06934113800525665,
0.039307914674282074,
0.016481002792716026,
0.2494807094335556,
-0.1401493102312088,
-0.03949239104986191,
0.06019750237464905,
0.10256842523813248,
0.016471518203616142,
0.0799381285905838,
-0.043788593262434006,
0.009333365596830845,
0.027195656672120094,
0.05514037236571312,
-0.03409945219755173,
-0.08675957471132278,
0.08437453955411911,
-0.029520289972424507,
0.13171923160552979,
-0.06010020524263382,
-0.04738997668027878,
-0.043266478925943375,
0.12386363744735718,
0.011061769910156727,
-0.07776936888694763,
-0.10774379968643188,
-0.027206668630242348,
-0.02230445295572281,
-0.06295763701200485,
0.08979412168264389,
0.005359678063541651,
0.08032010495662689,
-0.028170602396130562,
-0.20018140971660614,
0.014134060591459274,
-0.005502097308635712,
-0.024177080020308495,
-0.042502038180828094,
-0.025028638541698456,
0.04017305001616478,
-0.022986596450209618,
0.01401154138147831,
-0.07876207679510117,
0.019609088078141212,
-0.08952602744102478,
-0.10649024695158005,
0.02000868320465088,
-0.07604505121707916,
-0.010141554288566113,
-0.08770184218883514,
0.0847863107919693,
-0.0626630187034607,
0.0976627990603447,
0.07065840065479279,
0.09389322251081467,
-0.030744265764951706,
0.0595380999147892,
0.11988428235054016,
0.03228098526597023,
-0.3704637587070465,
-0.10971255600452423,
0.07950950413942337,
-0.006764227524399757,
-0.03737793490290642,
-0.3076312243938446,
0.12084968388080597,
0.00525685865432024,
-0.008790251798927784,
0.16347965598106384,
-0.09352044761180878,
-0.09163077920675278,
0.11526526510715485,
0.12326756119728088,
0.3552989065647125,
-0.08253563940525055,
-0.04559962451457977,
0.003898794762790203,
-0.12859836220741272,
0.13068582117557526,
0.018001990392804146,
0.1171310693025589,
-0.04831038787961006,
0.04438461735844612,
0.0360109843313694,
0.019503310322761536,
0.08029942214488983,
-0.03540505841374397,
0.05598371848464012,
-0.06463521718978882,
-0.011662925593554974,
-0.000004127079591853544,
-0.08598080277442932,
0.08330133557319641,
-0.0485181026160717,
0.09618052840232849,
-0.0977645292878151,
-0.06019384786486626,
-0.02303074486553669,
0.0895218700170517,
0.03296094760298729,
-0.11422538757324219,
-0.07331576943397522,
0.0622999370098114,
-0.004203513730317354,
0.022123930975794792,
0.11944267153739929,
0.06547210365533829,
-0.09712708741426468,
0.0376090444624424,
0.06717127561569214,
-0.05456056445837021,
-0.004624221008270979,
-0.002754609100520611,
-0.03070656955242157,
0.16198138892650604,
-0.09562799334526062,
0.032063983380794525,
0.0889454111456871,
0.05425336956977844,
-0.0017553610960021615,
0.08141268044710159,
-0.07685723900794983,
0.009732178412377834,
0.06259002536535263,
-0.16343823075294495,
-0.09776203334331512,
-0.09259766340255737,
-0.048562340438365936,
-0.02626796066761017,
0.1570354849100113,
0.15930058062076569,
-0.05942223221063614,
-0.0026575212832540274,
0.014097677543759346,
-0.05855351686477661,
-0.05703812092542648,
0.04225623980164528,
0.06302838027477264,
-0.0066997637040913105,
-0.10294894874095917,
0.04740869253873825,
0.02423771098256111,
0.06107557192444801,
0.03532146289944649,
0.01458766870200634,
-0.12553605437278748,
-0.010589071549475193,
-0.15984989702701569,
0.1314873844385147,
-0.07831712812185287,
-0.03518327325582504,
-0.04243164137005806,
-0.13502970337867737,
0.015050019137561321,
0.10363214462995529,
0.07600855082273483,
0.1487775444984436,
-0.0655531957745552,
0.03540092334151268,
-0.0792325884103775,
-0.007145324256271124,
-0.048041243106126785,
0.1064949631690979,
-0.16057346761226654,
0.04702514410018921,
-0.02241349220275879,
0.176285982131958,
-0.0872383862733841,
-0.041385263204574585,
-0.11600968986749649,
-0.04566450044512749,
-0.2594228982925415,
-0.03738454356789589,
-0.04076388105750084,
0.018520355224609375,
-0.06020640209317207,
-0.03554181382060051,
-0.041393958032131195,
0.056291744112968445,
-0.043615419417619705,
-0.024771561846137047,
-0.018793078139424324,
0.00490211695432663,
-0.08474418520927429,
-0.05608280003070831,
-0.016676807776093483,
-0.11386680603027344,
0.08938916772603989,
0.029873646795749664,
0.016332240775227547,
0.09048673510551453,
0.12422221153974533,
-0.07837054878473282,
0.060328830033540726,
0.07468027621507645,
0.059655457735061646,
0.031197192147374153,
-0.030596407130360603,
-0.028285320848226547,
0.03294124826788902,
-0.05755368992686272,
0.17998561263084412,
-0.06327379494905472,
-0.07138419896364212,
-0.11611562222242355,
-0.029478657990694046,
-0.035970572382211685,
-0.015780387446284294,
0.1533721685409546,
0.1497286558151245,
0.1416814774274826,
-0.018352944403886795,
0.025431359186768532,
-0.07882028073072433,
-0.01632012613117695,
-0.0065166885033249855,
-0.09019210934638977,
0.026771893724799156,
-0.09693150967359543,
0.026385359466075897,
0.0058989389799535275,
0.08911280333995819,
-0.08601851761341095,
-0.08434645086526871,
-0.01853443682193756,
-0.026791198179125786,
-0.03264186903834343,
-0.0009196512401103973,
0.27935513854026794,
0.10902122408151627,
-0.02351733110845089,
-0.028014101088047028,
0.1138053610920906,
0.07376734912395477,
0.1495441496372223,
0.09006064385175705,
0.073578841984272,
-0.028870154172182083,
0.11132626235485077,
0.07741181552410126,
0.0019983192905783653,
-0.1401698738336563,
-0.09375163912773132,
-0.04010002687573433,
0.01356538850814104,
-0.035326723009347916,
0.07749146223068237,
0.08440474420785904,
-0.04287317395210266,
0.08002535998821259,
0.04995490238070488,
-0.038708776235580444,
-0.07425393164157867,
0.07320893555879593,
-0.049729153513908386,
-0.13862177729606628,
0.013083124533295631,
-0.08198001235723495,
-0.07557903975248337,
0.035508885979652405,
-0.009476245380938053,
-0.03638670966029167,
0.14799362421035767,
-0.02481812797486782,
-0.10168589651584625,
0.1071527823805809,
-0.007816114462912083,
-0.05326656252145767,
0.08331131190061569,
-0.06670229882001877,
-0.002726558595895767,
0.06554297357797623,
-0.0007625609869137406,
-0.03185759112238884,
0.010326682589948177,
0.06136353686451912,
0.01415963377803564,
-0.04374018311500549,
-0.005780197214335203,
0.04662739858031273,
-0.0030424126889556646,
0.15587957203388214,
-0.005257509183138609,
-0.0518663115799427,
-0.009115650318562984,
0.09903284162282944,
-0.04717279225587845,
-0.029412537813186646,
-0.12461579591035843,
0.2626839578151703,
0.039546068757772446,
0.0730694830417633,
-0.009746531024575233,
-0.0433756560087204,
-0.07358971238136292,
0.28491348028182983,
0.05902813747525215,
-0.1271439492702484,
0.01901930943131447,
0.0036955198738723993,
0.008659576065838337,
-0.04854191839694977,
0.21019969880580902,
0.10577577352523804,
0.26513391733169556,
0.02599094994366169,
-0.1083819717168808,
-0.032789889723062515,
0.03681008145213127,
-0.03247817978262901,
0.0713648796081543,
0.06783144176006317,
-0.04383949190378189,
-0.1097622960805893,
-0.0875570997595787,
-0.07956921309232712,
-0.1599663645029068,
0.09511777758598328,
-0.06002015992999077,
-0.020107615739107132,
-0.04315185546875,
-0.12382004410028458,
-0.009261160157620907,
0.09215915203094482,
-0.016536179929971695,
0.11554140597581863,
0.160710409283638,
-0.014718708582222462,
-0.06684234738349915,
0.034627754241228104,
0.15944091975688934,
0.029348766431212425,
0.0513780377805233,
-0.025952821597456932,
-0.011873212642967701,
0.02325003780424595,
0.13153235614299774,
-0.08085478842258453,
0.04089771956205368,
-0.029285535216331482,
-0.13560594618320465,
-0.07493306696414948,
0.017456330358982086,
-0.060766078531742096,
0.0050666299648582935,
-0.01742069236934185,
-0.05206599086523056,
-0.03114178590476513,
0.14902596175670624,
-0.019834963604807854,
-0.08481678366661072,
0.045994389802217484,
-0.10669994354248047,
0.050728294998407364,
0.13719171285629272,
0.00785677321255207,
0.01144135557115078,
-0.03446849063038826,
0.028014881536364555,
0.06362195312976837,
-0.09730465710163116,
-0.08380886167287827,
-0.08441989868879318,
-0.08798657357692719,
-0.036566346883773804,
-0.03135712444782257,
-0.16577649116516113,
-0.02179299108684063,
-0.05688011273741722,
0.016874374821782112,
-0.058125317096710205,
0.05029292032122612,
0.025446085259318352,
-0.014284113422036171,
-0.0033927694894373417,
0.006835244130343199,
-0.045133087784051895,
0.019496995955705643,
-0.11683086305856705,
-0.1585361361503601
] |
null | null |
transformers
|
## DynaBERT: Dynamic BERT with Adaptive Width and Depth
* DynaBERT can flexibly adjust its size and latency by selecting an adaptive width and depth, and
its subnetworks achieve performance competitive with other compressed models of similar size.
DynaBERT is trained by first training a width-adaptive BERT and then
allowing both adaptive width and depth, using knowledge distillation.
* This code is modified from the repository developed by Hugging Face: [Transformers v2.1.1](https://github.com/huggingface/transformers/tree/v2.1.1), and is released on [GitHub](https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/DynaBERT).
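
As a hedged illustration (not the authors' API), the sketch below loads this checkpoint with the standard `transformers` classes and runs a single sentiment prediction. Whether the release ships a classification head and tokenizer files, and the label order, are assumptions; the width- and depth-adaptive mechanics live in the GitHub repository above.

```
import torch
from transformers import BertForSequenceClassification, BertTokenizer

# Assumption: the released checkpoint includes the SST-2 classification head.
model = BertForSequenceClassification.from_pretrained("huawei-noah/DynaBERT_SST-2")
tokenizer = BertTokenizer.from_pretrained("bert-base-uncased")  # assumed vocabulary

inputs = tokenizer("A remarkably compact yet accurate model.", return_tensors="pt")
with torch.no_grad():
    logits = model(**inputs).logits
print(logits.argmax(dim=-1).item())  # assumed label order: 0 = negative, 1 = positive
```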
### Reference
Lu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.
[DynaBERT: Dynamic BERT with Adaptive Width and Depth](https://arxiv.org/abs/2004.04037).
```
@inproceedings{hou2020dynabert,
title = {DynaBERT: Dynamic BERT with Adaptive Width and Depth},
author = {Lu Hou and Zhiqi Huang and Lifeng Shang and Xin Jiang and Xiao Chen and Qun Liu},
booktitle = {Advances in Neural Information Processing Systems},
year = {2020}
}
```
|
{}
| null |
huawei-noah/DynaBERT_SST-2
|
[
"transformers",
"pytorch",
"jax",
"bert",
"arxiv:2004.04037",
"endpoints_compatible",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2004.04037"
] |
[] |
TAGS
#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us
|
## DynaBERT: Dynamic BERT with Adaptive Width and Depth
* DynaBERT can flexibly adjust its size and latency by selecting an adaptive width and depth, and
its subnetworks achieve performance competitive with other compressed models of similar size.
DynaBERT is trained by first training a width-adaptive BERT and then
allowing both adaptive width and depth, using knowledge distillation.
* This code is modified from the repository developed by Hugging Face: Transformers v2.1.1, and is released on GitHub.
### Reference
Lu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.
DynaBERT: Dynamic BERT with Adaptive Width and Depth.
|
[
"## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.",
"### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
"TAGS\n#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us \n",
"## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.",
"### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
34,
136,
47
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #arxiv-2004.04037 #endpoints_compatible #region-us \n## DynaBERT: Dynamic BERT with Adaptive Width and Depth\n\n* DynaBERT can flexibly adjust the size and latency by selecting adaptive width and depth, and \nthe subnetworks of it have competitive performances as other similar-sized compressed models.\nThe training process of DynaBERT includes first training a width-adaptive BERT and then \nallowing both adaptive width and depth using knowledge distillation. \n\n* This code is modified based on the repository developed by Hugging Face: Transformers v2.1.1, and is released in GitHub.### Reference\nLu Hou, Zhiqi Huang, Lifeng Shang, Xin Jiang, Xiao Chen, Qun Liu.\nDynaBERT: Dynamic BERT with Adaptive Width and Depth."
] |
[
-0.04640497267246246,
0.062194932252168655,
-0.0024007975589483976,
0.06621027737855911,
0.13853752613067627,
-0.021721696481108665,
0.09933654963970184,
0.0463455431163311,
-0.032217804342508316,
-0.025705797597765923,
0.03086184523999691,
0.029750535264611244,
0.03861381486058235,
0.10609053075313568,
-0.02029876783490181,
-0.2867789566516876,
0.03639873489737511,
0.0957026481628418,
-0.1599423885345459,
0.0070722345262765884,
0.08523627370595932,
-0.11427120119333267,
0.10260650515556335,
0.040353428572416306,
-0.13776299357414246,
0.04648720473051071,
-0.05770221725106239,
-0.03340814262628555,
0.13822318613529205,
0.06742295622825623,
0.19895319640636444,
0.07370071113109589,
0.05249422788619995,
-0.03890644758939743,
0.031619325280189514,
0.02753223292529583,
0.007798473350703716,
0.03959975764155388,
0.04966900497674942,
0.202104851603508,
0.13618986308574677,
-0.033802181482315063,
-0.011621044017374516,
0.0007954379543662071,
-0.015552138909697533,
-0.08973048627376556,
-0.069710373878479,
-0.03801281750202179,
0.07276280224323273,
0.042332086712121964,
0.029340835288167,
0.06477876752614975,
-0.06322271376848221,
0.04206397756934166,
0.1570155769586563,
-0.34002000093460083,
0.018114173784852028,
0.12723305821418762,
0.06801590323448181,
-0.0074868639931082726,
-0.09286614507436752,
-0.0027618298772722483,
0.044369567185640335,
0.03978743031620979,
0.11871882528066635,
-0.10354158282279968,
0.0009015402756631374,
-0.006720948964357376,
-0.1236073300242424,
0.09890078008174896,
0.10532891750335693,
-0.010607123374938965,
-0.054431986063718796,
-0.07447993010282516,
-0.04105202481150627,
0.03848988935351372,
-0.039846718311309814,
-0.13111141324043274,
0.07209114730358124,
0.0038063055835664272,
-0.0691145807504654,
-0.10407252609729767,
-0.04718426242470741,
-0.06903630495071411,
-0.08260516822338104,
0.17413631081581116,
0.048464950174093246,
-0.007318675518035889,
-0.12049170583486557,
0.04290483146905899,
0.07104537636041641,
-0.08886678516864777,
-0.12050144374370575,
-0.08280674368143082,
0.07457096129655838,
-0.043132249265909195,
-0.14055640995502472,
-0.11050230264663696,
0.06348753720521927,
0.013121827505528927,
-0.04219494387507439,
0.019920745864510536,
0.05438508838415146,
-0.03481358289718628,
0.01243880670517683,
0.18262679874897003,
-0.09830140322446823,
0.001163846580311656,
0.091972716152668,
0.007036871742457151,
-0.018687298521399498,
-0.060217902064323425,
-0.12087764590978622,
-0.08382020145654678,
0.07269434630870819,
0.0083420155569911,
-0.09041920304298401,
0.133135125041008,
0.0218292698264122,
-0.08256914466619492,
0.029812345281243324,
-0.0817725881934166,
-0.06022792309522629,
-0.01235879771411419,
-0.05611801892518997,
0.057974305003881454,
0.052870966494083405,
-0.03244524449110031,
-0.03666243702173233,
0.0935652032494545,
-0.11797890067100525,
-0.05904586240649223,
-0.04432210326194763,
-0.09175629913806915,
-0.005146965850144625,
-0.0481896847486496,
0.05099806934595108,
-0.1619361937046051,
-0.03009507618844509,
0.03676380589604378,
-0.03916231915354729,
0.0847431868314743,
-0.02204873599112034,
-0.024049364030361176,
-0.03557829186320305,
-0.040001362562179565,
-0.026247723028063774,
-0.09007466584444046,
0.009831679984927177,
0.08477668464183807,
0.014668683521449566,
0.0321132130920887,
-0.20589688420295715,
0.08646875619888306,
-0.05804482102394104,
0.038303524255752563,
-0.09306348115205765,
0.09058544039726257,
-0.029688101261854172,
0.01658858358860016,
-0.009099104441702366,
-0.05957619845867157,
0.02726774662733078,
0.007929501123726368,
0.08833744376897812,
0.14044223725795746,
-0.11446104198694229,
-0.09034640341997147,
0.11191191524267197,
-0.07956854999065399,
-0.06539987772703171,
0.036691345274448395,
-0.05326138809323311,
-0.064402274787426,
0.11207320541143417,
0.12503032386302948,
0.19133301079273224,
-0.17333711683750153,
-0.040211305022239685,
0.10161904990673065,
-0.0026277604047209024,
-0.06416724622249603,
0.01622854731976986,
0.10280639678239822,
-0.03307482600212097,
0.06815136969089508,
-0.1582157015800476,
0.10361689329147339,
-0.01509149745106697,
-0.05536501482129097,
0.004975876305252314,
-0.10453414916992188,
0.08827631175518036,
0.003398026106879115,
0.07126712799072266,
0.07265561074018478,
0.05378071591258049,
0.1622493416070938,
0.12387096881866455,
-0.07077504694461823,
0.022600747644901276,
-0.10322577506303787,
-0.022321457043290138,
0.07743106037378311,
0.07699903100728989,
-0.09612777084112167,
-0.14096759259700775,
0.04892359673976898,
-0.006638683378696442,
-0.0244440995156765,
0.16660068929195404,
0.06161453202366829,
0.06194682419300079,
-0.028753064572811127,
0.01664140820503235,
-0.14625687897205353,
-0.02631732076406479,
-0.011470520868897438,
-0.03027181327342987,
-0.05517284572124481,
-0.05344060808420181,
0.030407361686229706,
-0.09837261587381363,
-0.012341664172708988,
0.028246574103832245,
0.045115597546100616,
0.07798200100660324,
-0.00845405925065279,
-0.020679239183664322,
0.05588245391845703,
-0.03525558114051819,
-0.04333777725696564,
0.026116106659173965,
0.05513326823711395,
-0.08150036633014679,
0.09329527616500854,
-0.02283664606511593,
0.21133656799793243,
0.12048404663801193,
-0.02482973411679268,
-0.015876973047852516,
-0.06216523423790932,
-0.03387819975614548,
-0.013945956714451313,
0.038541413843631744,
-0.007977079600095749,
0.14389288425445557,
-0.009595573879778385,
0.1475646048784256,
-0.057301491498947144,
0.05172925442457199,
-0.01764683984220028,
0.004343416541814804,
0.02865501120686531,
-0.011354498565196991,
-0.0019943206571042538,
0.05369481071829796,
0.06569938361644745,
0.03976869210600853,
-0.07465734332799911,
0.14677157998085022,
-0.08509872108697891,
-0.055399857461452484,
0.009356173686683178,
0.040719859302043915,
0.01784289814531803,
0.0790150910615921,
-0.029480062425136566,
-0.08612371981143951,
-0.012769460678100586,
-0.01872495748102665,
0.0012342649279162288,
-0.10648355633020401,
-0.03166025131940842,
0.02577160857617855,
-0.03843485936522484,
-0.004281121306121349,
0.011966665275394917,
-0.03339855372905731,
0.05843053013086319,
0.0776495486497879,
-0.06716644763946533,
0.02576976828277111,
-0.0205066055059433,
-0.02622973918914795,
0.1673118621110916,
-0.0801747739315033,
-0.23186632990837097,
-0.07670403271913528,
-0.21580061316490173,
-0.12562604248523712,
0.1009954959154129,
0.006469095125794411,
-0.13905946910381317,
-0.04756101593375206,
0.03159092739224434,
0.15860363841056824,
-0.06701561063528061,
0.05206518620252609,
0.010908239521086216,
0.006474206689745188,
0.03310679644346237,
-0.15169811248779297,
0.010657303035259247,
-0.08146867901086807,
-0.06602973490953445,
0.012529661878943443,
-0.011461487971246243,
0.06577345728874207,
0.07390773296356201,
0.015142041258513927,
-0.051686517894268036,
-0.033998262137174606,
0.16999424993991852,
-0.015814239159226418,
-0.06522590667009354,
0.15283310413360596,
-0.033426735550165176,
0.06997639685869217,
0.007198861334472895,
0.022611819207668304,
-0.10972829908132553,
0.06207912787795067,
0.03166283294558525,
-0.0473036989569664,
-0.10295946896076202,
-0.05805029720067978,
-0.09480045735836029,
0.052229881286621094,
0.07232891023159027,
0.006471594795584679,
0.019037537276744843,
0.048064541071653366,
0.010846782475709915,
0.21173343062400818,
0.00680063059553504,
0.046051230281591415,
0.14817668497562408,
-0.005354925058782101,
0.07016392052173615,
-0.0224855188280344,
-0.14388352632522583,
0.05727493017911911,
0.09432481974363327,
0.24052946269512177,
-0.05926137790083885,
0.06343374401330948,
-0.004285563714802265,
0.14680291712284088,
0.04697496071457863,
0.1168997511267662,
-0.03339948505163193,
-0.010999244637787342,
-0.025174155831336975,
-0.06012147665023804,
-0.056536972522735596,
0.05566153675317764,
0.03465833142399788,
-0.13225157558918,
-0.0715208575129509,
0.036950912326574326,
-0.07349002361297607,
0.2668358087539673,
0.02105046808719635,
-0.16096282005310059,
-0.019500525668263435,
-0.05077173560857773,
-0.019839828833937645,
-0.06561882793903351,
0.06619075685739517,
0.07454272359609604,
-0.06519591808319092,
0.030998192727565765,
-0.05323343724012375,
0.07691895961761475,
-0.11645317822694778,
0.06012483686208725,
-0.07252418249845505,
0.11142729222774506,
0.07009445875883102,
0.006476127542555332,
-0.28353893756866455,
0.038583122193813324,
-0.01533033512532711,
0.015737924724817276,
-0.028219474479556084,
0.030294140800833702,
0.11735255271196365,
0.06934113800525665,
0.039307914674282074,
0.016481002792716026,
0.2494807094335556,
-0.1401493102312088,
-0.03949239104986191,
0.06019750237464905,
0.10256842523813248,
0.016471518203616142,
0.0799381285905838,
-0.043788593262434006,
0.009333365596830845,
0.027195656672120094,
0.05514037236571312,
-0.03409945219755173,
-0.08675957471132278,
0.08437453955411911,
-0.029520289972424507,
0.13171923160552979,
-0.06010020524263382,
-0.04738997668027878,
-0.043266478925943375,
0.12386363744735718,
0.011061769910156727,
-0.07776936888694763,
-0.10774379968643188,
-0.027206668630242348,
-0.02230445295572281,
-0.06295763701200485,
0.08979412168264389,
0.005359678063541651,
0.08032010495662689,
-0.028170602396130562,
-0.20018140971660614,
0.014134060591459274,
-0.005502097308635712,
-0.024177080020308495,
-0.042502038180828094,
-0.025028638541698456,
0.04017305001616478,
-0.022986596450209618,
0.01401154138147831,
-0.07876207679510117,
0.019609088078141212,
-0.08952602744102478,
-0.10649024695158005,
0.02000868320465088,
-0.07604505121707916,
-0.010141554288566113,
-0.08770184218883514,
0.0847863107919693,
-0.0626630187034607,
0.0976627990603447,
0.07065840065479279,
0.09389322251081467,
-0.030744265764951706,
0.0595380999147892,
0.11988428235054016,
0.03228098526597023,
-0.3704637587070465,
-0.10971255600452423,
0.07950950413942337,
-0.006764227524399757,
-0.03737793490290642,
-0.3076312243938446,
0.12084968388080597,
0.00525685865432024,
-0.008790251798927784,
0.16347965598106384,
-0.09352044761180878,
-0.09163077920675278,
0.11526526510715485,
0.12326756119728088,
0.3552989065647125,
-0.08253563940525055,
-0.04559962451457977,
0.003898794762790203,
-0.12859836220741272,
0.13068582117557526,
0.018001990392804146,
0.1171310693025589,
-0.04831038787961006,
0.04438461735844612,
0.0360109843313694,
0.019503310322761536,
0.08029942214488983,
-0.03540505841374397,
0.05598371848464012,
-0.06463521718978882,
-0.011662925593554974,
-0.000004127079591853544,
-0.08598080277442932,
0.08330133557319641,
-0.0485181026160717,
0.09618052840232849,
-0.0977645292878151,
-0.06019384786486626,
-0.02303074486553669,
0.0895218700170517,
0.03296094760298729,
-0.11422538757324219,
-0.07331576943397522,
0.0622999370098114,
-0.004203513730317354,
0.022123930975794792,
0.11944267153739929,
0.06547210365533829,
-0.09712708741426468,
0.0376090444624424,
0.06717127561569214,
-0.05456056445837021,
-0.004624221008270979,
-0.002754609100520611,
-0.03070656955242157,
0.16198138892650604,
-0.09562799334526062,
0.032063983380794525,
0.0889454111456871,
0.05425336956977844,
-0.0017553610960021615,
0.08141268044710159,
-0.07685723900794983,
0.009732178412377834,
0.06259002536535263,
-0.16343823075294495,
-0.09776203334331512,
-0.09259766340255737,
-0.048562340438365936,
-0.02626796066761017,
0.1570354849100113,
0.15930058062076569,
-0.05942223221063614,
-0.0026575212832540274,
0.014097677543759346,
-0.05855351686477661,
-0.05703812092542648,
0.04225623980164528,
0.06302838027477264,
-0.0066997637040913105,
-0.10294894874095917,
0.04740869253873825,
0.02423771098256111,
0.06107557192444801,
0.03532146289944649,
0.01458766870200634,
-0.12553605437278748,
-0.010589071549475193,
-0.15984989702701569,
0.1314873844385147,
-0.07831712812185287,
-0.03518327325582504,
-0.04243164137005806,
-0.13502970337867737,
0.015050019137561321,
0.10363214462995529,
0.07600855082273483,
0.1487775444984436,
-0.0655531957745552,
0.03540092334151268,
-0.0792325884103775,
-0.007145324256271124,
-0.048041243106126785,
0.1064949631690979,
-0.16057346761226654,
0.04702514410018921,
-0.02241349220275879,
0.176285982131958,
-0.0872383862733841,
-0.041385263204574585,
-0.11600968986749649,
-0.04566450044512749,
-0.2594228982925415,
-0.03738454356789589,
-0.04076388105750084,
0.018520355224609375,
-0.06020640209317207,
-0.03554181382060051,
-0.041393958032131195,
0.056291744112968445,
-0.043615419417619705,
-0.024771561846137047,
-0.018793078139424324,
0.00490211695432663,
-0.08474418520927429,
-0.05608280003070831,
-0.016676807776093483,
-0.11386680603027344,
0.08938916772603989,
0.029873646795749664,
0.016332240775227547,
0.09048673510551453,
0.12422221153974533,
-0.07837054878473282,
0.060328830033540726,
0.07468027621507645,
0.059655457735061646,
0.031197192147374153,
-0.030596407130360603,
-0.028285320848226547,
0.03294124826788902,
-0.05755368992686272,
0.17998561263084412,
-0.06327379494905472,
-0.07138419896364212,
-0.11611562222242355,
-0.029478657990694046,
-0.035970572382211685,
-0.015780387446284294,
0.1533721685409546,
0.1497286558151245,
0.1416814774274826,
-0.018352944403886795,
0.025431359186768532,
-0.07882028073072433,
-0.01632012613117695,
-0.0065166885033249855,
-0.09019210934638977,
0.026771893724799156,
-0.09693150967359543,
0.026385359466075897,
0.0058989389799535275,
0.08911280333995819,
-0.08601851761341095,
-0.08434645086526871,
-0.01853443682193756,
-0.026791198179125786,
-0.03264186903834343,
-0.0009196512401103973,
0.27935513854026794,
0.10902122408151627,
-0.02351733110845089,
-0.028014101088047028,
0.1138053610920906,
0.07376734912395477,
0.1495441496372223,
0.09006064385175705,
0.073578841984272,
-0.028870154172182083,
0.11132626235485077,
0.07741181552410126,
0.0019983192905783653,
-0.1401698738336563,
-0.09375163912773132,
-0.04010002687573433,
0.01356538850814104,
-0.035326723009347916,
0.07749146223068237,
0.08440474420785904,
-0.04287317395210266,
0.08002535998821259,
0.04995490238070488,
-0.038708776235580444,
-0.07425393164157867,
0.07320893555879593,
-0.049729153513908386,
-0.13862177729606628,
0.013083124533295631,
-0.08198001235723495,
-0.07557903975248337,
0.035508885979652405,
-0.009476245380938053,
-0.03638670966029167,
0.14799362421035767,
-0.02481812797486782,
-0.10168589651584625,
0.1071527823805809,
-0.007816114462912083,
-0.05326656252145767,
0.08331131190061569,
-0.06670229882001877,
-0.002726558595895767,
0.06554297357797623,
-0.0007625609869137406,
-0.03185759112238884,
0.010326682589948177,
0.06136353686451912,
0.01415963377803564,
-0.04374018311500549,
-0.005780197214335203,
0.04662739858031273,
-0.0030424126889556646,
0.15587957203388214,
-0.005257509183138609,
-0.0518663115799427,
-0.009115650318562984,
0.09903284162282944,
-0.04717279225587845,
-0.029412537813186646,
-0.12461579591035843,
0.2626839578151703,
0.039546068757772446,
0.0730694830417633,
-0.009746531024575233,
-0.0433756560087204,
-0.07358971238136292,
0.28491348028182983,
0.05902813747525215,
-0.1271439492702484,
0.01901930943131447,
0.0036955198738723993,
0.008659576065838337,
-0.04854191839694977,
0.21019969880580902,
0.10577577352523804,
0.26513391733169556,
0.02599094994366169,
-0.1083819717168808,
-0.032789889723062515,
0.03681008145213127,
-0.03247817978262901,
0.0713648796081543,
0.06783144176006317,
-0.04383949190378189,
-0.1097622960805893,
-0.0875570997595787,
-0.07956921309232712,
-0.1599663645029068,
0.09511777758598328,
-0.06002015992999077,
-0.020107615739107132,
-0.04315185546875,
-0.12382004410028458,
-0.009261160157620907,
0.09215915203094482,
-0.016536179929971695,
0.11554140597581863,
0.160710409283638,
-0.014718708582222462,
-0.06684234738349915,
0.034627754241228104,
0.15944091975688934,
0.029348766431212425,
0.0513780377805233,
-0.025952821597456932,
-0.011873212642967701,
0.02325003780424595,
0.13153235614299774,
-0.08085478842258453,
0.04089771956205368,
-0.029285535216331482,
-0.13560594618320465,
-0.07493306696414948,
0.017456330358982086,
-0.060766078531742096,
0.0050666299648582935,
-0.01742069236934185,
-0.05206599086523056,
-0.03114178590476513,
0.14902596175670624,
-0.019834963604807854,
-0.08481678366661072,
0.045994389802217484,
-0.10669994354248047,
0.050728294998407364,
0.13719171285629272,
0.00785677321255207,
0.01144135557115078,
-0.03446849063038826,
0.028014881536364555,
0.06362195312976837,
-0.09730465710163116,
-0.08380886167287827,
-0.08441989868879318,
-0.08798657357692719,
-0.036566346883773804,
-0.03135712444782257,
-0.16577649116516113,
-0.02179299108684063,
-0.05688011273741722,
0.016874374821782112,
-0.058125317096710205,
0.05029292032122612,
0.025446085259318352,
-0.014284113422036171,
-0.0033927694894373417,
0.006835244130343199,
-0.045133087784051895,
0.019496995955705643,
-0.11683086305856705,
-0.1585361361503601
] |
null | null | null |
# Overview
<p align="center">
<img src="https://avatars.githubusercontent.com/u/12619994?s=200&v=4" width="150">
</p>
JABER (Junior Arabic BERt) is a 12-layer pretrained Arabic language model.
JABER ranked first on the [ALUE leaderboard](https://www.alue.org/leaderboard) as of `01/09/2021`.
This model is **only compatible** with the code in [this GitHub repo](https://github.com/huawei-noah/Pretrained-Language-Model/tree/master/JABER-PyTorch); it is not supported by the [Transformers](https://github.com/huggingface/transformers) library.
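Because the checkpoint cannot be loaded through Transformers, a minimal sketch of fetching the raw weights for use with the JABER-PyTorch code might look like the following. The `snapshot_download` call is standard `huggingface_hub` API; everything after that (how the GitHub repo consumes the local files, including any flag names) is an assumption — consult the repo's README for the actual entry points.
```python
# Sketch: download the raw JABER checkpoint from the Hub, then hand the
# local files to the JABER-PyTorch code from the Huawei Noah repo.
from huggingface_hub import snapshot_download

local_dir = snapshot_download(repo_id="huawei-noah/JABER")
print(f"JABER files downloaded to: {local_dir}")
# From here, follow the JABER-PyTorch repo's instructions, e.g. pointing
# its model-directory argument at `local_dir` (hypothetical flag name).
```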
## Citation
Please cite the following [paper](https://arxiv.org/abs/2112.04329) when using our code and model:
``` bibtex
@misc{ghaddar2021jaber,
title={JABER: Junior Arabic BERt},
author={Abbas Ghaddar and Yimeng Wu and Ahmad Rashid and Khalil Bibi and Mehdi Rezagholizadeh and Chao Xing and Yasheng Wang and Duan Xinyu and Zhefeng Wang and Baoxing Huai and Xin Jiang and Qun Liu and Philippe Langlais},
year={2021},
eprint={2112.04329},
archivePrefix={arXiv},
primaryClass={cs.CL}
}
```
|
{}
| null |
huawei-noah/JABER
|
[
"pytorch",
"arxiv:2112.04329",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"2112.04329"
] |
[] |
TAGS
#pytorch #arxiv-2112.04329 #region-us
|
# Overview
<p align="center">
<img src="URL width="150">
</p>
JABER (Junior Arabic BERt) is a 12-layer Arabic pretrained Language Model.
JABER obtained rank one on ALUE leaderboard at '01/09/2021'.
This model is only compatible with the code in this github repo (not supported by the Transformers library)
Please cite the following paper when using our code and model:
|
[
"# Overview\n\n<p align=\"center\">\n <img src=\"URL width=\"150\">\n</p>\n\n\n\nJABER (Junior Arabic BERt) is a 12-layer Arabic pretrained Language Model. \nJABER obtained rank one on ALUE leaderboard at '01/09/2021'. \nThis model is only compatible with the code in this github repo (not supported by the Transformers library)\n \nPlease cite the following paper when using our code and model:"
] |
[
"TAGS\n#pytorch #arxiv-2112.04329 #region-us \n",
"# Overview\n\n<p align=\"center\">\n <img src=\"URL width=\"150\">\n</p>\n\n\n\nJABER (Junior Arabic BERt) is a 12-layer Arabic pretrained Language Model. \nJABER obtained rank one on ALUE leaderboard at '01/09/2021'. \nThis model is only compatible with the code in this github repo (not supported by the Transformers library)\n \nPlease cite the following paper when using our code and model:"
] |
[
20,
102
] |
[
"passage: TAGS\n#pytorch #arxiv-2112.04329 #region-us \n# Overview\n\n<p align=\"center\">\n <img src=\"URL width=\"150\">\n</p>\n\n\n\nJABER (Junior Arabic BERt) is a 12-layer Arabic pretrained Language Model. \nJABER obtained rank one on ALUE leaderboard at '01/09/2021'. \nThis model is only compatible with the code in this github repo (not supported by the Transformers library)\n \nPlease cite the following paper when using our code and model:"
] |
[
-0.020132608711719513,
0.03406504541635513,
-0.005926536861807108,
0.08489752560853958,
0.11396875977516174,
0.011912118643522263,
0.1786065399646759,
0.028351515531539917,
-0.005057728383690119,
-0.031509362161159515,
0.11660850048065186,
-0.017863992601633072,
0.06022438034415245,
0.08336467295885086,
0.00415993295609951,
-0.1546735018491745,
0.019501756876707077,
0.012712671421468258,
0.17943210899829865,
0.09653506428003311,
0.0824100449681282,
-0.005846146959811449,
0.06729177385568619,
0.03033919632434845,
-0.13519421219825745,
0.053599145263433456,
-0.013195385225117207,
-0.03508603200316429,
0.05980499088764191,
0.04232074320316315,
0.23976992070674896,
-0.05395667627453804,
-0.0481598861515522,
-0.04930378496646881,
0.058217812329530716,
0.026444517076015472,
-0.033341001719236374,
0.06544836610555649,
0.051322758197784424,
-0.07454202324151993,
-0.02763611264526844,
0.08438827097415924,
-0.12066400796175003,
-0.0034128285478800535,
-0.16112489998340607,
-0.0730317011475563,
0.05021539330482483,
0.12062860280275345,
0.036933526396751404,
0.020411521196365356,
0.008479366078972816,
-0.012532631866633892,
-0.08138063549995422,
0.06801944971084595,
0.1832016408443451,
-0.09473564475774765,
-0.07780159264802933,
0.03234495967626572,
-0.0828852504491806,
0.14739324152469635,
-0.010855669155716896,
-0.004476467613130808,
0.06373137980699539,
-0.013180235400795937,
-0.08096875250339508,
-0.08220236748456955,
-0.10962853580713272,
-0.013354330323636532,
-0.056041788309812546,
-0.11260075122117996,
0.19664530456066132,
-0.0931723564863205,
-0.05621185153722763,
0.08664613217115402,
-0.10215383023023605,
0.038182333111763,
-0.026872511953115463,
0.07198639214038849,
0.003467680886387825,
-0.009446467272937298,
0.2323140799999237,
-0.019641263410449028,
-0.08660326153039932,
-0.1294623166322708,
-0.1592324674129486,
0.2877785265445709,
0.06640587747097015,
0.061241235584020615,
-0.16765505075454712,
0.04295221343636513,
-0.06690125167369843,
-0.1263556182384491,
0.018856242299079895,
-0.06953894346952438,
0.11971832811832428,
0.05957671254873276,
-0.0050497110933065414,
-0.11131972074508667,
0.1070658341050148,
-0.03815609961748123,
-0.01336918119341135,
0.09646794199943542,
-0.06617336720228195,
0.10583128780126572,
-0.0010608576703816652,
0.10353168100118637,
0.05227569490671158,
0.10816417634487152,
0.0619712769985199,
0.07151663303375244,
0.04842294752597809,
0.000764509488362819,
-0.1381438821554184,
-0.019674712792038918,
0.05766918882727623,
0.004974983166903257,
-0.09994037449359894,
0.027931151911616325,
-0.06030083820223808,
-0.012291302904486656,
0.08304159343242645,
-0.09945764392614365,
-0.021004842594265938,
0.03525567799806595,
-0.00458740396425128,
-0.08720862865447998,
0.0720025971531868,
-0.033252011984586716,
-0.11517073959112167,
-0.07233521342277527,
-0.06421882659196854,
0.04333138465881348,
-0.057608626782894135,
-0.14165954291820526,
-0.01448131911456585,
-0.12976044416427612,
0.09645479172468185,
-0.23032517731189728,
0.01040733978152275,
0.049140963703393936,
0.07985571026802063,
0.0037249866873025894,
0.04664401337504387,
0.0033520436845719814,
0.00020444889378268272,
0.039110638201236725,
-0.06849853694438934,
-0.004400223959237337,
-0.06181067228317261,
0.07192859053611755,
0.1432710587978363,
0.020543452352285385,
-0.12502679228782654,
0.029271740466356277,
-0.05797228962182999,
0.03017517924308777,
-0.1938970983028412,
0.027026239782571793,
-0.06477441638708115,
0.025451596826314926,
-0.10249155759811401,
0.01959487795829773,
0.12076457589864731,
0.059907518327236176,
0.09740554541349411,
0.1156269907951355,
-0.09131461381912231,
-0.12852329015731812,
0.10989508032798767,
-0.11883752793073654,
-0.09305736422538757,
0.06494828313589096,
-0.023564932867884636,
0.006586078554391861,
0.06207134947180748,
0.2282194048166275,
0.004514586180448532,
-0.07122621685266495,
-0.042461756616830826,
0.03560500591993332,
0.0573117695748806,
-0.06357338279485703,
0.10831471532583237,
0.08125252276659012,
-0.19545604288578033,
0.020081568509340286,
-0.0256413035094738,
0.13144615292549133,
0.026235098019242287,
-0.0130265261977911,
0.02181762084364891,
-0.029286835342645645,
-0.04744361340999603,
0.10163994133472443,
0.0856887623667717,
-0.04430966079235077,
0.008831390179693699,
0.12012044340372086,
0.05649133026599884,
0.046866241842508316,
-0.024132980033755302,
-0.0631769448518753,
-0.007579247001558542,
-0.22295530140399933,
0.05891750752925873,
-0.061115823686122894,
0.09626840800046921,
-0.0015033400850370526,
-0.08365272730588913,
0.08354662358760834,
0.09551695734262466,
0.09657483547925949,
0.05698724463582039,
0.012432976625859737,
-0.053306519985198975,
-0.03363018110394478,
0.014626584015786648,
-0.026129158213734627,
-0.10877103358507156,
-0.004199794493615627,
-0.05120978504419327,
-0.03973918408155441,
0.002359123434871435,
0.011916982010006905,
-0.09435063600540161,
0.08390463143587112,
-0.022369898855686188,
0.02858639881014824,
0.10100431740283966,
0.01950516737997532,
-0.007901846431195736,
0.018490862101316452,
0.024111885577440262,
-0.027103671804070473,
0.01719643548130989,
0.060370560735464096,
0.023392649367451668,
0.2517867982387543,
0.1239149197936058,
0.00460234796628356,
0.06996824592351913,
0.008368766866624355,
-0.008612295612692833,
-0.005741966888308525,
0.12996600568294525,
0.038875479251146317,
0.34166470170021057,
0.0032452913001179695,
0.1734543740749359,
-0.1183546856045723,
0.086353600025177,
0.056523315608501434,
-0.11528581380844116,
-0.04042454808950424,
0.20303525030612946,
-0.023896459490060806,
-0.2923761308193207,
0.10348020493984222,
0.109084352850914,
-0.108310766518116,
0.19562412798404694,
-0.00022761145373806357,
-0.011232922784984112,
0.006925377529114485,
0.0048323823139071465,
0.010168365202844143,
0.13513807952404022,
-0.17556077241897583,
-0.11986653506755829,
0.027967438101768494,
0.007098781410604715,
0.020848505198955536,
-0.0696815773844719,
-0.027172308415174484,
-0.04913970082998276,
0.0032179385889321566,
-0.16291019320487976,
-0.03708964213728905,
-0.025382468476891518,
0.07882849127054214,
0.026073215529322624,
-0.08957836776971817,
-0.030588675290346146,
0.030782969668507576,
-0.022339506074786186,
0.138644278049469,
-0.07002527266740799,
-0.21869608759880066,
-0.05243774503469467,
-0.18247485160827637,
-0.1440897136926651,
0.05485111102461815,
0.03529440239071846,
-0.1496998816728592,
0.010874385014176369,
0.026796944439411163,
0.1361543834209442,
-0.08282975107431412,
0.003087220247834921,
-0.03451833873987198,
-0.07348920404911041,
0.031505048274993896,
-0.14667949080467224,
-0.020205022767186165,
-0.039652980864048004,
-0.021217191591858864,
0.05597356706857681,
-0.17432864010334015,
0.0823851227760315,
0.02553688734769821,
0.005171560682356358,
0.08618730306625366,
0.007321474142372608,
0.22504812479019165,
-0.030350714921951294,
-0.00888241920620203,
0.14459893107414246,
0.005231224466115236,
0.003909563645720482,
0.19016538560390472,
0.019625473767518997,
-0.018812047317624092,
-0.045471157878637314,
-0.03633638471364975,
-0.041293516755104065,
-0.1274537295103073,
0.030538054183125496,
-0.08399230986833572,
-0.01378333568572998,
0.09074520319700241,
0.07800708711147308,
-0.18033187091350555,
0.03542283922433853,
-0.04197046533226967,
0.15605460107326508,
-0.03396159037947655,
0.03254334256052971,
-0.0444965697824955,
-0.0062976498156785965,
0.03926661238074303,
-0.05900672450661659,
-0.05935245752334595,
0.0777711346745491,
0.20612557232379913,
-0.048365313559770584,
-0.010447762906551361,
0.16610637307167053,
0.06029801443219185,
0.03513788804411888,
0.14535953104496002,
0.11471473425626755,
-0.0631948709487915,
0.0005069206818006933,
-0.05061192810535431,
-0.026075975969433784,
-0.2216198593378067,
0.019874464720487595,
-0.10464737564325333,
0.0871901661157608,
0.009747995063662529,
-0.1232125535607338,
0.07824977487325668,
-0.07337528467178345,
0.04539516940712929,
-0.22431877255439758,
-0.04908648133277893,
0.09852681308984756,
-0.054351698607206345,
-0.22408956289291382,
-0.004691199399530888,
-0.009022832848131657,
0.00660300999879837,
0.029821079224348068,
0.014840126037597656,
0.17792586982250214,
-0.12955737113952637,
0.04789860546588898,
-0.21430353820323944,
-0.012760822661221027,
-0.025540869683027267,
0.10507595539093018,
-0.4601868987083435,
0.1819583922624588,
0.06792142242193222,
0.03526596352458,
-0.002250710502266884,
0.01004051323980093,
0.01921975240111351,
0.0861082449555397,
0.04159408062696457,
0.038813672959804535,
0.2636290192604065,
-0.1568240523338318,
-0.1255524903535843,
0.12102710455656052,
-0.014034819789230824,
-0.0688336044549942,
-0.029736798256635666,
0.03281724080443382,
0.07040873169898987,
-0.09147024899721146,
0.07417017966508865,
-0.05305302515625954,
-0.022349152714014053,
0.03624975308775902,
0.01911640539765358,
-0.008280634880065918,
-0.014751339331269264,
-0.02726936712861061,
0.06710021942853928,
0.008809690363705158,
-0.13673359155654907,
-0.1435704380273819,
-0.0682782232761383,
-0.08739583939313889,
0.0015549075324088335,
-0.1166171282529831,
-0.038826681673526764,
-0.18226422369480133,
-0.03303146734833717,
-0.08995044976472855,
-0.09768904000520706,
0.036290884017944336,
-0.10371502488851547,
0.07194414734840393,
-0.03483391925692558,
0.08146410435438156,
-0.055980291217565536,
0.024195924401283264,
-0.008021584711968899,
-0.030037956312298775,
-0.15389122068881989,
-0.05293916165828705,
0.012676475569605827,
-0.047344617545604706,
0.07374286651611328,
0.04621761664748192,
0.08347609639167786,
0.008676157332956791,
-0.03320721164345741,
0.049991339445114136,
-0.009442348033189774,
0.04989870265126228,
0.06973496079444885,
0.02894224412739277,
0.23797017335891724,
-0.01670950837433338,
-0.15333017706871033,
-0.2187584787607193,
0.09803302586078644,
0.03379925340414047,
-0.220368430018425,
-0.2408539056777954,
0.043740544468164444,
-0.0617285892367363,
-0.033756323158741,
0.021533753722906113,
-0.02790609933435917,
-0.026818472892045975,
0.21489758789539337,
0.09167934954166412,
0.4205491244792938,
-0.14019687473773956,
-0.04367971792817116,
-0.06681540608406067,
-0.12358995527029037,
0.017087195068597794,
-0.16610375046730042,
0.10719124227762222,
-0.0674954503774643,
0.037122808396816254,
0.006859937217086554,
-0.03376854956150055,
0.12229134887456894,
-0.03940647095441818,
0.015062264166772366,
-0.11342422664165497,
-0.17988303303718567,
-0.07144563645124435,
-0.021614976227283478,
0.06907441467046738,
-0.10013067722320557,
0.02760285697877407,
-0.23338532447814941,
-0.039825666695833206,
-0.05523374676704407,
-0.008528346195816994,
0.02478477731347084,
-0.06819173693656921,
0.005097127985209227,
-0.0409584566950798,
-0.10714642703533173,
0.05641486495733261,
0.31359997391700745,
-0.01932688057422638,
0.05034702643752098,
0.1835014522075653,
0.018092283979058266,
-0.17493341863155365,
0.04125657677650452,
-0.02203691378235817,
-0.004172037355601788,
0.1736111342906952,
-0.213469997048378,
-0.0039230152033269405,
0.07318172603845596,
-0.019617125391960144,
0.09069442003965378,
0.014675850979983807,
-0.10801809281110764,
0.05585236847400665,
0.09230097383260727,
-0.16796322166919708,
-0.0054555791430175304,
-0.06356894969940186,
0.11854106932878494,
0.04010709375143051,
0.10867548733949661,
0.1927330046892166,
-0.07436861097812653,
-0.03431084007024765,
0.039487287402153015,
-0.041389912366867065,
-0.04455803707242012,
0.02956811524927616,
0.09715999662876129,
0.06355682015419006,
-0.14399147033691406,
0.07604095339775085,
0.11356667429208755,
0.15182387828826904,
0.005281602498143911,
0.07975872606039047,
-0.04838509112596512,
-0.06927881389856339,
0.021906431764364243,
0.3702313005924225,
-0.004388104192912579,
-0.09740125387907028,
-0.11373057216405869,
-0.06234884634613991,
-0.008874849416315556,
0.11056601256132126,
0.07821601629257202,
-0.01032513752579689,
-0.050592124462127686,
-0.008139059878885746,
0.052963655441999435,
0.044976238161325455,
-0.08185508102178574,
-0.010299824178218842,
-0.11259277164936066,
-0.051579054445028305,
0.03440744802355766,
0.1583324670791626,
-0.05240783840417862,
-0.020301764830946922,
-0.19226892292499542,
0.0905691385269165,
-0.1820778250694275,
0.11879796534776688,
0.01203505601733923,
0.057474054396152496,
-0.01969219744205475,
-0.08330632746219635,
-0.04383940249681473,
0.006814897060394287,
-0.05048007890582085,
0.08533578366041183,
0.020881887525320053,
0.10146898031234741,
-0.06384655833244324,
0.019434407353401184,
0.04421903192996979,
0.014592936262488365,
0.07952515780925751,
0.07007073611021042,
-0.014336683787405491,
0.13422799110412598,
-0.04540489614009857,
0.0020689486991614103,
0.06382868438959122,
-0.019507350400090218,
0.05307997763156891,
0.008060638792812824,
0.03703020140528679,
-0.00426665972918272,
-0.01018046960234642,
-0.027876894921064377,
0.12162624299526215,
-0.010583996772766113,
-0.06954479962587357,
0.09302271902561188,
-0.05793556943535805,
-0.05924048274755478,
0.10978026688098907,
0.0964081808924675,
0.14967641234397888,
-0.0011584078893065453,
0.040743302553892136,
-0.011601046659052372,
-0.046298086643218994,
0.0016871168045327067,
-0.008838815614581108,
-0.21098291873931885,
-0.13670878112316132,
-0.08684082329273224,
-0.02529221400618553,
-0.036198340356349945,
0.11264145374298096,
0.11002644896507263,
0.0021458377595990896,
-0.04705372452735901,
0.11072001606225967,
-0.05188218131661415,
-0.002883928595110774,
0.09097468107938766,
0.009886484593153,
0.018256017938256264,
-0.042735397815704346,
-0.0007129900623112917,
0.03306153789162636,
0.1495128870010376,
-0.04504663497209549,
0.13661302626132965,
0.0553281269967556,
0.05318608507514,
0.0104603823274374,
0.013187318108975887,
-0.07537432760000229,
-0.08766768127679825,
0.1149214655160904,
-0.012671185657382011,
-0.004225598648190498,
0.03266477957367897,
0.22720688581466675,
-0.0015441509895026684,
-0.008907335810363293,
-0.014204665087163448,
-0.007336711045354605,
-0.030688973143696785,
-0.11113424599170685,
-0.05574061721563339,
-0.05228449031710625,
-0.03712814301252365,
-0.1023033857345581,
-0.060204144567251205,
0.13934914767742157,
-0.01690049096941948,
0.0011296506272628903,
0.10918333381414413,
0.25365033745765686,
-0.10973142087459564,
0.021627549082040787,
-0.029928363859653473,
0.038118477910757065,
-0.011048429645597935,
-0.009578360244631767,
0.04428721219301224,
-0.007430872414261103,
0.031207699328660965,
0.02837429754436016,
0.01798117347061634,
-0.015970241278409958,
-0.06085018813610077,
-0.13552457094192505,
-0.051762573421001434,
0.03329899534583092,
-0.011903176084160805,
0.16698810458183289,
0.0020229255314916372,
-0.025613876059651375,
0.019616691395640373,
0.1654542237520218,
0.09554433822631836,
0.04604800418019295,
-0.009839498437941074,
0.18289898335933685,
-0.09667477756738663,
0.07587545365095139,
-0.019030926749110222,
0.009858822450041771,
-0.09531161934137344,
0.15963266789913177,
0.02223268337547779,
-0.1141417995095253,
-0.0014601927250623703,
-0.011244433932006359,
0.05805926024913788,
0.08542095124721527,
0.11148035526275635,
-0.01941087283194065,
0.18812386691570282,
-0.048729490488767624,
-0.001422534347511828,
-0.011101552285254002,
-0.015330123715102673,
0.0034759731497615576,
0.047828033566474915,
0.056443750858306885,
-0.07661721110343933,
-0.13547977805137634,
-0.005152607336640358,
-0.1000540629029274,
-0.028411928564310074,
-0.11893212795257568,
-0.07900626957416534,
-0.06561855971813202,
-0.10498087853193283,
0.19449806213378906,
0.09166977554559708,
0.006714966148138046,
-0.037052933126688004,
0.018612967804074287,
-0.05400505289435387,
0.0006427469197660685,
-0.09961985051631927,
-0.16842754185199738,
0.1223505437374115,
0.07354112714529037,
0.06291592121124268,
0.031132254749536514,
0.010618212632834911,
0.00985261332243681,
-0.028304871171712875,
-0.054155781865119934,
0.09689189493656158,
-0.0165571216493845,
-0.00531574385240674,
-0.07042654603719711,
0.07353037595748901,
-0.020490629598498344,
-0.11920661479234695,
0.030531546100974083,
-0.06910194456577301,
-0.040538687258958817,
0.03643599525094032,
-0.11350420117378235,
-0.02969810552895069,
0.07360713928937912,
-0.11003246158361435,
0.003931702114641666,
0.11572573333978653,
0.009409958496689796,
-0.07244573533535004,
-0.08187194913625717,
0.0890195295214653,
0.061635926365852356,
-0.03934963792562485,
-0.05505926534533501,
-0.00023435316688846797,
-0.09068022668361664,
-0.008887285366654396,
-0.02634109929203987,
-0.09304582327604294,
0.09118390828371048,
-0.09865154325962067,
0.05482121929526329,
-0.06526737660169601,
0.008950677700340748,
0.07157761603593826,
0.031447380781173706,
-0.03808829188346863,
-0.08807411044836044,
0.02797889895737171,
0.009440302848815918,
0.0019955437164753675,
-0.1455087661743164
] |
null | null |
transformers
|
TinyBERT: Distilling BERT for Natural Language Understanding
========
TinyBERT is 7.5x smaller and 9.4x faster at inference than BERT-base, and it achieves competitive performance on natural language understanding tasks. It performs a novel Transformer distillation at both the pre-training and the task-specific learning stage. In general distillation, we use the original BERT-base without fine-tuning as the teacher and a large-scale text corpus as the learning data. By performing Transformer distillation on general-domain text, we obtain a general TinyBERT that provides a good initialization for the task-specific distillation. We provide this general TinyBERT here for your tasks at hand.
For more details about the techniques of TinyBERT, refer to our paper:
[TinyBERT: Distilling BERT for Natural Language Understanding](https://arxiv.org/abs/1909.10351)
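Since this checkpoint is compatible with the Transformers library (see the tags above), a minimal sketch of loading the general TinyBERT and running a forward pass might look like this; the example sentence is arbitrary:
```python
# Minimal sketch: load the general TinyBERT checkpoint with Transformers
# and run a forward pass to get contextual token embeddings.
import torch
from transformers import AutoTokenizer, AutoModel

tokenizer = AutoTokenizer.from_pretrained("huawei-noah/TinyBERT_General_4L_312D")
model = AutoModel.from_pretrained("huawei-noah/TinyBERT_General_4L_312D")

inputs = tokenizer("TinyBERT is small and fast.", return_tensors="pt")
with torch.no_grad():
    outputs = model(**inputs)

# 4 layers, hidden size 312 -> last_hidden_state is (batch, seq_len, 312).
print(outputs.last_hidden_state.shape)
```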
Citation
========
If you find TinyBERT useful in your research, please cite the following paper:
```bibtex
@article{jiao2019tinybert,
title={Tinybert: Distilling bert for natural language understanding},
author={Jiao, Xiaoqi and Yin, Yichun and Shang, Lifeng and Jiang, Xin and Chen, Xiao and Li, Linlin and Wang, Fang and Liu, Qun},
journal={arXiv preprint arXiv:1909.10351},
year={2019}
}
```
|
{}
| null |
huawei-noah/TinyBERT_General_4L_312D
|
[
"transformers",
"pytorch",
"jax",
"bert",
"arxiv:1909.10351",
"endpoints_compatible",
"has_space",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[
"1909.10351"
] |
[] |
TAGS
#transformers #pytorch #jax #bert #arxiv-1909.10351 #endpoints_compatible #has_space #region-us
|
TinyBERT: Distilling BERT for Natural Language Understanding
========
TinyBERT is 7.5x smaller and 9.4x faster on inference than BERT-base and achieves competitive performances in the tasks of natural language understanding. It performs a novel transformer distillation at both the pre-training and task-specific learning stages. In general distillation, we use the original BERT-base without fine-tuning as the teacher and a large-scale text corpus as the learning data. By performing the Transformer distillation on the text from general domain, we obtain a general TinyBERT which provides a good initialization for the task-specific distillation. We here provide the general TinyBERT for your tasks at hand.
For more details about the techniques of TinyBERT, refer to our paper:
TinyBERT: Distilling BERT for Natural Language Understanding
Citation
========
If you find TinyBERT useful in your research, please cite the following paper:
|
[] |
[
"TAGS\n#transformers #pytorch #jax #bert #arxiv-1909.10351 #endpoints_compatible #has_space #region-us \n"
] |
[
38
] |
[
"passage: TAGS\n#transformers #pytorch #jax #bert #arxiv-1909.10351 #endpoints_compatible #has_space #region-us \n"
] |
[
-0.016308529302477837,
0.0752631202340126,
-0.008024814538657665,
-0.004149698186665773,
0.050848886370658875,
0.018873870372772217,
0.04356412962079048,
0.09436506778001785,
0.09139789640903473,
0.04240008443593979,
0.1983315795660019,
0.1446629911661148,
-0.055811457335948944,
0.006584931630641222,
-0.04735531285405159,
-0.21810004115104675,
0.046095531433820724,
0.08386818319559097,
-0.06030234321951866,
0.10566326230764389,
0.03576589375734329,
-0.14170394837856293,
0.05659594386816025,
-0.023042650893330574,
-0.11289442330598831,
0.0492221973836422,
0.025470642372965813,
-0.085634745657444,
0.12672561407089233,
0.0009151278645731509,
0.1726912260055542,
0.04124266281723976,
-0.0516684390604496,
-0.07780236005783081,
0.031164320185780525,
-0.0002384405815973878,
-0.07387880235910416,
0.06745614111423492,
0.00953275803476572,
-0.06076623126864433,
0.0406680554151535,
0.01240749005228281,
-0.008412960916757584,
-0.001504017156548798,
-0.19153374433517456,
-0.22769737243652344,
-0.06637176126241684,
0.05731717124581337,
-0.0014848595019429922,
0.07183805853128433,
0.02071206085383892,
0.1830640733242035,
-0.08703489601612091,
0.03735654056072235,
0.2651556134223938,
-0.3579108715057373,
-0.027102405205368996,
0.15678194165229797,
0.11765005439519882,
0.04974573105573654,
-0.0482824370265007,
0.0842309445142746,
0.042814724147319794,
-0.0010417706798762083,
0.08522215485572815,
-0.08442667871713638,
-0.06855250895023346,
0.12474920600652695,
-0.11706231534481049,
-0.08771311491727829,
0.22005803883075714,
-0.030617231503129005,
0.0750492736697197,
0.042492788285017014,
-0.09659881889820099,
-0.12356825917959213,
0.01992517150938511,
-0.03376911208033562,
0.016476556658744812,
0.032081831246614456,
0.02537132427096367,
-0.012402708642184734,
-0.14551521837711334,
0.04027697443962097,
-0.17677368223667145,
0.22766287624835968,
-0.023812245577573776,
0.08970708400011063,
-0.1924644261598587,
0.036626122891902924,
-0.0386490523815155,
-0.08478222787380219,
0.07710318267345428,
-0.08229623734951019,
0.05243249610066414,
0.043861743062734604,
-0.09435988962650299,
0.034925784915685654,
0.011702258139848709,
0.1027049869298935,
0.016763711348176003,
0.016959229484200478,
0.11712596565485,
0.1250273436307907,
0.035668838769197464,
0.08853067457675934,
-0.010615651495754719,
-0.04303961619734764,
0.007986094802618027,
-0.046411558985710144,
0.017845693975687027,
-0.05569338798522949,
-0.12108741700649261,
-0.08392725884914398,
0.053989119827747345,
0.03391270712018013,
0.06015971675515175,
0.0036248238757252693,
-0.0394894964993,
0.031871940940618515,
0.04357758164405823,
-0.023359432816505432,
0.005210318136960268,
-0.020182685926556587,
0.036544278264045715,
0.09206956624984741,
-0.024552248418331146,
-0.012555964291095734,
0.055803071707487106,
0.08082050085067749,
-0.12089026719331741,
-0.012571997940540314,
-0.039337627589702606,
-0.09126503020524979,
0.05087871849536896,
-0.08099433034658432,
0.06241099536418915,
-0.1649002581834793,
0.021877769380807877,
0.010671253316104412,
0.06879180669784546,
-0.0033632603008300066,
-0.006163842044770718,
0.06747078895568848,
-0.04725729301571846,
0.06009405106306076,
-0.05396623909473419,
-0.032203804701566696,
-0.06223064288496971,
0.08331820368766785,
-0.02938215434551239,
0.1406940221786499,
-0.0824035108089447,
0.05059818550944328,
-0.06894081085920334,
0.03114018589258194,
-0.10807089507579803,
-0.08090516179800034,
-0.03845878317952156,
0.12063539773225784,
0.024203285574913025,
-0.052835170179605484,
-0.12650224566459656,
0.03796340897679329,
0.012839146889746189,
0.1283995509147644,
-0.10024777054786682,
-0.060953717678785324,
0.13371632993221283,
-0.036484312266111374,
-0.15682806074619293,
0.039223045110702515,
0.01089328620582819,
-0.029713185504078865,
-0.009262063540518284,
0.228415846824646,
-0.031213851645588875,
-0.11005815863609314,
-0.017717918381094933,
0.10365571081638336,
-0.06433101743459702,
-0.13187506794929504,
0.06878939270973206,
0.03360152244567871,
-0.04213797673583031,
-0.0038752006366848946,
0.009787443093955517,
0.04408102482557297,
-0.07304950803518295,
-0.022931048646569252,
-0.012437405996024609,
-0.03284550458192825,
0.08762892335653305,
0.05822739005088806,
0.10866539925336838,
-0.08803950250148773,
-0.04325239732861519,
0.013772217556834221,
0.011530979536473751,
0.08703804016113281,
0.04889413341879845,
-0.00486110569909215,
0.12438064813613892,
-0.10018135607242584,
-0.01882593147456646,
-0.1653074473142624,
-0.0837835893034935,
-0.03984803706407547,
0.08376185595989227,
-0.022349873557686806,
0.2292431890964508,
0.08953523635864258,
-0.10765586793422699,
-0.0015392189379781485,
-0.03647229075431824,
0.08430032432079315,
0.04972218722105026,
-0.06275252252817154,
-0.07357348501682281,
-0.022648490965366364,
-0.08417785167694092,
-0.09376112371683121,
-0.0733199417591095,
0.025510506704449654,
0.04838864132761955,
0.09890462458133698,
-0.009168755263090134,
0.021786987781524658,
-0.00961610209196806,
0.027009032666683197,
-0.04174308478832245,
0.00731716537848115,
0.07646186649799347,
-0.007602081634104252,
-0.03629595413804054,
0.2003306895494461,
-0.12515370547771454,
0.3559722304344177,
0.19825835525989532,
-0.29323363304138184,
-0.008838756941258907,
0.058176301419734955,
-0.01760890707373619,
0.032460231333971024,
0.09498398751020432,
-0.0321788489818573,
-0.002828258089721203,
-0.03916808217763901,
0.09412772208452225,
-0.0254573542624712,
-0.059971071779727936,
-0.011104761622846127,
-0.04707731679081917,
-0.08189476281404495,
0.0824466124176979,
0.034319717437028885,
-0.1306077241897583,
0.17340226471424103,
0.36403384804725647,
-0.005487754940986633,
0.10639753937721252,
0.028109107166528702,
-0.009229512885212898,
-0.027938706800341606,
-0.08469488471746445,
-0.062427859753370285,
0.10213986039161682,
-0.19016395509243011,
-0.08949819207191467,
0.06882307678461075,
-0.012578148394823074,
0.056479454040527344,
-0.13705313205718994,
-0.09115584194660187,
0.03868233412504196,
0.09530269354581833,
-0.10768630355596542,
0.1281525194644928,
0.02627457119524479,
0.10347548127174377,
0.0329342782497406,
-0.06026880070567131,
0.027434149757027626,
0.008241711184382439,
-0.024280497804284096,
0.11624914407730103,
-0.08972956240177155,
-0.21016810834407806,
-0.0667409896850586,
-0.0920262560248375,
0.04761100560426712,
-0.0003532882547006011,
0.07777281105518341,
-0.06380485743284225,
0.006717015523463488,
0.05070644989609718,
-0.004663462750613689,
-0.1976550817489624,
0.05262209102511406,
-0.0017896925564855337,
0.0010767054045572877,
-0.08384804427623749,
-0.07450311630964279,
-0.0779329389333725,
-0.08034390211105347,
-0.017352590337395668,
0.11022182554006577,
-0.015532685443758965,
0.0830102488398552,
0.11048544943332672,
-0.0020609803032130003,
0.046172115951776505,
-0.004859893582761288,
0.20229408144950867,
-0.05660252273082733,
-0.02450602687895298,
0.14600256085395813,
0.0075960359536111355,
0.05983555316925049,
0.0924936905503273,
0.06020185351371765,
-0.04963742941617966,
-0.042801935225725174,
-0.045034948736429214,
-0.09872528165578842,
-0.13735423982143402,
-0.057562462985515594,
-0.12534666061401367,
0.005628121551126242,
0.022680379450321198,
0.033355794847011566,
0.0715164765715599,
0.02637360244989395,
0.04817669838666916,
-0.044104211032390594,
-0.10531269758939743,
0.05072321370244026,
0.20225894451141357,
-0.06081314757466316,
0.10538162291049957,
-0.028872711583971977,
-0.06104569137096405,
0.05887670814990997,
0.02840319089591503,
0.08538581430912018,
0.1069881021976471,
-0.05559942126274109,
0.04820151999592781,
0.18135647475719452,
0.13370051980018616,
0.08794017136096954,
-0.0032843351364135742,
-0.05988398194313049,
-0.026181545108556747,
-0.012504376471042633,
-0.026490505784749985,
0.08367499709129333,
0.09420066326856613,
-0.10788802802562714,
-0.023390674963593483,
-0.25921520590782166,
0.017457112669944763,
0.01989723928272724,
0.09725295007228851,
-0.1672413945198059,
-0.016395937651395798,
0.08021117746829987,
0.003386508207768202,
-0.030131449922919273,
0.06469539552927017,
0.06661918759346008,
-0.06515197455883026,
0.02892657183110714,
0.009917248971760273,
0.08653301000595093,
0.06734059751033783,
0.08119868487119675,
-0.08355732262134552,
-0.15813378989696503,
0.015774644911289215,
0.02596031129360199,
-0.19223101437091827,
0.27198395133018494,
-0.02189173549413681,
-0.11966868489980698,
-0.0006199841154739261,
-0.060485146939754486,
0.01331083383411169,
0.1278432011604309,
0.1023331731557846,
0.0556962713599205,
-0.11637371778488159,
-0.08915507048368454,
0.037397678941488266,
0.00029209256172180176,
0.08259335905313492,
-0.05096493288874626,
-0.0011841384693980217,
0.005063670687377453,
-0.010625595226883888,
0.0014867631252855062,
0.1870705932378769,
0.05287843197584152,
-0.09145239740610123,
0.07542461901903152,
0.04053173214197159,
-0.01104656420648098,
-0.0024229518603533506,
-0.05332522094249725,
-0.11662249267101288,
0.06968821585178375,
0.033840347081422806,
0.0006492666434496641,
-0.07976718991994858,
-0.12192552536725998,
0.1435960829257965,
-0.06738828867673874,
0.0622783899307251,
-0.037591688334941864,
-0.04285982623696327,
-0.07015381008386612,
-0.14274246990680695,
0.1429104506969452,
-0.10738223791122437,
0.02196444198489189,
-0.07018791139125824,
0.12232495844364166,
-0.07468231767416,
0.06801189482212067,
-0.026415636762976646,
0.09884055703878403,
-0.18692883849143982,
-0.06550520658493042,
0.10937388986349106,
-0.05630499869585037,
0.07318365573883057,
-0.06542745977640152,
0.0013391779502853751,
0.025286540389060974,
0.05261548236012459,
0.023407742381095886,
0.19917070865631104,
0.2573014795780182,
-0.11493708193302155,
0.11457359045743942,
0.09471975266933441,
0.0003980575711466372,
-0.259343683719635,
-0.054787129163742065,
-0.15685337781906128,
-0.01503689493983984,
0.03145023062825203,
-0.07789114117622375,
0.06349765509366989,
0.01869179680943489,
-0.06805425882339478,
0.13522568345069885,
-0.2476753443479538,
-0.06518343091011047,
0.10870017111301422,
-0.05027787387371063,
0.5427178144454956,
-0.12584462761878967,
-0.030247801914811134,
0.04637512192130089,
-0.22081518173217773,
0.11562182009220123,
0.06089700013399124,
0.04704555496573448,
-0.05669553950428963,
0.056388407945632935,
0.032314129173755646,
-0.06380321830511093,
0.13112670183181763,
-0.06286856532096863,
0.02072625793516636,
-0.08059965819120407,
-0.22846849262714386,
0.08472657948732376,
-0.05212847515940666,
-0.0588027760386467,
0.014658438041806221,
-0.004477033857256174,
-0.20173844695091248,
0.031286511570215225,
-0.14839418232440948,
0.06553211808204651,
0.01608751155436039,
-0.04296772554516792,
-0.039111558347940445,
-0.014235743321478367,
-0.013721832074224949,
-0.018194902688264847,
0.30317679047584534,
-0.02427011728286743,
0.23322127759456635,
0.04610117897391319,
-0.018797488883137703,
-0.1700507253408432,
-0.06010047718882561,
0.004991483874619007,
-0.05829382687807083,
0.11305874586105347,
-0.14598214626312256,
0.007199834566563368,
0.11673025786876678,
0.01656275801360607,
-0.02819567546248436,
0.09782645851373672,
-0.004209399223327637,
-0.011654950678348541,
0.13098052144050598,
-0.24213211238384247,
-0.041783787310123444,
-0.02586345188319683,
0.011562603525817394,
0.11440792679786682,
0.05294165760278702,
0.08874903619289398,
-0.010928821749985218,
-0.03963553532958031,
-0.00481571676209569,
-0.05317818373441696,
-0.05888422206044197,
-0.0013655353104695678,
0.06918296217918396,
0.057795025408267975,
-0.06798223406076431,
0.015774551779031754,
0.030351854860782623,
-0.2143317013978958,
-0.03538632392883301,
0.14144538342952728,
-0.053134091198444366,
-0.12581098079681396,
-0.09710661321878433,
0.008859495632350445,
-0.13867101073265076,
0.023032698780298233,
-0.010019699111580849,
-0.05981556698679924,
0.053412310779094696,
0.25057312846183777,
0.08285832405090332,
0.0486249141395092,
-0.004369244910776615,
0.0008407121640630066,
0.06427820771932602,
-0.03780742734670639,
-0.035874076187610626,
0.02106020599603653,
-0.10089456290006638,
0.02437056228518486,
-0.027916068211197853,
0.13594041764736176,
-0.08772341161966324,
-0.022349704056978226,
-0.15671661496162415,
0.02077677845954895,
-0.04316592589020729,
-0.13060466945171356,
-0.10954134911298752,
-0.09718010574579239,
-0.012615879066288471,
-0.12493818998336792,
-0.08669508993625641,
-0.03719034045934677,
-0.13918522000312805,
0.035855889320373535,
0.0010524268727749586,
0.02592659369111061,
-0.06950120627880096,
-0.042918961495161057,
0.10668893903493881,
-0.026823844760656357,
0.0766250267624855,
0.15316170454025269,
-0.008123042061924934,
0.07174781709909439,
-0.0189838707447052,
-0.1153075248003006,
0.07277946174144745,
0.006593463011085987,
0.07129790633916855,
0.055103398859500885,
-0.0313635878264904,
0.0171184204518795,
0.02559797465801239,
0.04236363619565964,
-0.013939385302364826,
-0.06301235407590866,
-0.02156710997223854,
0.045315396040678024,
-0.13519692420959473,
0.007286043372005224,
-0.09688195586204529,
0.1747545450925827,
0.026661785319447517,
0.052958350628614426,
0.030295297503471375,
0.05524545535445213,
-0.08773773908615112,
0.024466849863529205,
-0.028397927060723305,
-0.18933556973934174,
-0.0005596280097961426,
-0.024437401443719864,
0.0350193977355957,
-0.00983234029263258,
0.21188156306743622,
-0.017321912571787834,
-0.09161645174026489,
0.05054694786667824,
0.08478395640850067,
-0.030412500724196434,
0.01149791106581688,
0.1684495210647583,
0.091544508934021,
-0.07920147478580475,
-0.07645127922296524,
0.1127890944480896,
0.023447558283805847,
0.02486448548734188,
0.09543365240097046,
0.13309091329574585,
0.14750123023986816,
0.09523943811655045,
0.0002936564851552248,
-0.01995590142905712,
-0.12990722060203552,
-0.18705502152442932,
-0.027986565604805946,
0.04509739205241203,
-0.038133591413497925,
0.119367316365242,
0.1881268322467804,
0.0014307718956843019,
0.04275456443428993,
-0.08681844174861908,
0.03301463648676872,
-0.12059010565280914,
-0.040784694254398346,
-0.026517178863286972,
-0.09323891252279282,
-0.02054089866578579,
-0.047451265156269073,
0.060992587357759476,
0.1645970642566681,
0.021406957879662514,
0.01412133127450943,
0.05112553387880325,
0.11411100625991821,
-0.061407122761011124,
0.023709148168563843,
0.02265876531600952,
0.025899279862642288,
-0.05442943051457405,
0.026835069060325623,
-0.10024254769086838,
-0.09484304487705231,
-0.06315214931964874,
-0.006711815018206835,
-0.08191201835870743,
-0.037139326333999634,
-0.07697693258523941,
-0.10781776160001755,
-0.06278131902217865,
0.03649825230240822,
-0.00816943496465683,
0.07771122455596924,
-0.017936397343873978,
0.05368487164378166,
-0.012451968155801296,
0.26470911502838135,
-0.10704881697893143,
0.0584547333419323,
0.04201073199510574,
0.14059069752693176,
-0.004557896871119738,
0.09454340487718582,
-0.046313513070344925,
0.03581303730607033,
-0.0833403617143631,
0.22576068341732025,
0.2847914397716522,
-0.05624249577522278,
0.07220369577407837,
0.06950334459543228,
0.02867165394127369,
0.05147368460893631,
0.03725270926952362,
0.12451982498168945,
0.2409486323595047,
-0.1335807591676712,
-0.028181305155158043,
-0.0723150223493576,
0.037707142531871796,
-0.04479392245411873,
0.06795963644981384,
0.05061950534582138,
-0.08152519911527634,
-0.06414829194545746,
0.031069235876202583,
-0.08663168549537659,
0.05413535609841347,
0.06593523174524307,
-0.30486205220222473,
-0.05380941182374954,
0.0016089315759018064,
0.1923138052225113,
-0.028110947459936142,
0.13729740679264069,
-0.03900745511054993,
-0.11485236138105392,
-0.00020474407938309014,
0.01531259249895811,
-0.19838373363018036,
-0.10273808240890503,
0.13922470808029175,
-0.005403620656579733,
0.049625214189291,
-0.06538025289773941,
0.0032722647301852703,
0.08720825612545013,
0.0672408938407898,
-0.05803600326180458,
-0.018590077757835388,
0.06345627456903458,
-0.09190394729375839,
-0.1411285400390625,
-0.019239118322730064,
0.01530242245644331,
-0.01702236570417881,
0.08203009516000748,
-0.16959215700626373,
0.03673459589481354,
0.01395686436444521,
-0.028177127242088318,
0.02438635751605034,
-0.003630738938227296,
-0.012998519465327263,
0.07032262533903122,
0.03834224492311478,
-0.021663721650838852,
-0.0665566474199295,
-0.012115960009396076,
-0.06960845738649368,
0.09290081262588501,
-0.009013311937451363,
-0.15864159166812897,
-0.036508433520793915,
-0.038258105516433716,
0.05892025679349899,
-0.0010933337034657598,
-0.07106629014015198,
-0.038184963166713715,
-0.027590492740273476,
0.043047066777944565,
-0.057734414935112,
0.011564500629901886,
0.04115474224090576,
0.008196278475224972,
0.0017104578437283635,
-0.06976033747196198,
0.06152587756514549,
0.07075255364179611,
-0.12456463277339935,
-0.070296511054039
] |
null | null | null |
This is an Audacity wrapper for the model, forked from the repository `groadabike/ConvTasNet_DAMP-VSEP_enhboth`.
This model was trained using the Asteroid library: https://github.com/asteroid-team/asteroid.
The following info was copied directly from `groadabike/ConvTasNet_DAMP-VSEP_enhboth`:
### Description:
This model was trained by Gerardo Roa Dabike using Asteroid. It was trained on the enh_both task of the DAMP-VSEP dataset.
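A minimal sketch of loading the upstream Asteroid checkpoint and separating a mixture is shown below. `ConvTasNet.from_pretrained` with a Hub model id is standard Asteroid usage, but the exact repo id, file paths, and I/O handling here are assumptions.
```python
# Sketch: load the upstream Asteroid checkpoint and separate a 16 kHz mono
# mixture into its two estimated sources (per the enh_both task, n_src=2).
import torch
import soundfile as sf
from asteroid.models import ConvTasNet

model = ConvTasNet.from_pretrained("groadabike/ConvTasNet_DAMP-VSEP_enhboth")

mixture, sr = sf.read("mixture.wav", dtype="float32")  # mono, 16 kHz expected
wav = torch.from_numpy(mixture).unsqueeze(0)           # shape (batch, time)
with torch.no_grad():
    est_sources = model(wav)                           # (batch, n_src, time)

for i, src in enumerate(est_sources[0]):
    sf.write(f"source_{i}.wav", src.numpy(), sr)
```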
### Training config:
```yaml
data:
channels: 1
n_src: 2
root_path: data
sample_rate: 16000
samples_per_track: 10
segment: 3.0
task: enh_both
filterbank:
kernel_size: 20
n_filters: 256
stride: 10
main_args:
exp_dir: exp/train_convtasnet
help: None
masknet:
bn_chan: 256
conv_kernel_size: 3
hid_chan: 512
mask_act: relu
n_blocks: 8
n_repeats: 4
n_src: 2
norm_type: gLN
skip_chan: 256
optim:
lr: 0.0003
optimizer: adam
weight_decay: 0.0
positional arguments:
training:
batch_size: 12
early_stop: True
epochs: 50
half_lr: True
num_workers: 12
```
### Results:
```yaml
si_sdr: 14.018196157142519
si_sdr_imp: 14.017103133809577
sdr: 14.498517291333885
sdr_imp: 14.463389151567865
sir: 24.149634529133372
sir_imp: 24.11450638936735
sar: 15.338597389045935
sar_imp: -137.30634122401517
stoi: 0.7639416744417206
stoi_imp: 0.1843383526963759
```
### License notice:
This work "ConvTasNet_DAMP-VSEP_enhboth" is a derivative of DAMP-VSEP: Smule Digital Archive of Mobile Performances - Vocal Separation (Version 1.0.1) by Smule, Inc, used under Smule's Research Data License Agreement (Research only). "ConvTasNet_DAMP-VSEP_enhboth" is licensed under Attribution-ShareAlike 3.0 Unported by Gerardo Roa Dabike.
|
{"tags": ["audacity"], "inference": false, "sample_rate": 8000}
| null |
hugggof/ConvTasNet-DAMP-Vocals
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
This is an Audacity wrapper for the model, forked from the repository 'groadabike/ConvTasNet_DAMP-VSEP_enhboth',
This model was trained using the Asteroid library: URL
The following info was copied directly from 'groadabike/ConvTasNet_DAMP-VSEP_enhboth':
### Description:
This model was trained by Gerardo Roa Dabike using Asteroid. It was trained on the enh_both task of the DAMP-VSEP dataset.
### Training config:
### Results:
### License notice:
This work "ConvTasNet_DAMP-VSEP_enhboth" is a derivative of DAMP-VSEP: Smule Digital Archive of Mobile Performances - Vocal Separation (Version 1.0.1) by Smule, Inc, used under Smule's Research Data License Agreement (Research only). "ConvTasNet_DAMP-VSEP_enhboth" is licensed under Attribution-ShareAlike 3.0 Unported by Gerardo Roa Dabike.
|
[
"### Description:\nThis model was trained by Gerardo Roa Dabike using Asteroid. It was trained on the enh_both task of the DAMP-VSEP dataset.",
"### Training config:",
"### Results:",
"### License notice:\nThis work \"ConvTasNet_DAMP-VSEP_enhboth\" is a derivative of DAMP-VSEP: Smule Digital Archive of Mobile Performances - Vocal Separation (Version 1.0.1) by Smule, Inc, used under Smule's Research Data License Agreement (Research only). \"ConvTasNet_DAMP-VSEP_enhboth\" is licensed under Attribution-ShareAlike 3.0 Unported by Gerardo Roa Dabike."
] |
[
"TAGS\n#audacity #region-us \n",
"### Description:\nThis model was trained by Gerardo Roa Dabike using Asteroid. It was trained on the enh_both task of the DAMP-VSEP dataset.",
"### Training config:",
"### Results:",
"### License notice:\nThis work \"ConvTasNet_DAMP-VSEP_enhboth\" is a derivative of DAMP-VSEP: Smule Digital Archive of Mobile Performances - Vocal Separation (Version 1.0.1) by Smule, Inc, used under Smule's Research Data License Agreement (Research only). \"ConvTasNet_DAMP-VSEP_enhboth\" is licensed under Attribution-ShareAlike 3.0 Unported by Gerardo Roa Dabike."
] |
[
10,
42,
6,
4,
109
] |
[
"passage: TAGS\n#audacity #region-us \n### Description:\nThis model was trained by Gerardo Roa Dabike using Asteroid. It was trained on the enh_both task of the DAMP-VSEP dataset.### Training config:### Results:### License notice:\nThis work \"ConvTasNet_DAMP-VSEP_enhboth\" is a derivative of DAMP-VSEP: Smule Digital Archive of Mobile Performances - Vocal Separation (Version 1.0.1) by Smule, Inc, used under Smule's Research Data License Agreement (Research only). \"ConvTasNet_DAMP-VSEP_enhboth\" is licensed under Attribution-ShareAlike 3.0 Unported by Gerardo Roa Dabike."
] |
[
-0.05139695107936859,
-0.11411935091018677,
-0.0028752966318279505,
0.040572639554739,
0.06716839969158173,
0.006272376049309969,
0.1127328872680664,
-0.05003158003091812,
0.03402869030833244,
-0.03862382844090462,
0.09223604947328568,
-0.005665054079145193,
0.021896250545978546,
0.11221813410520554,
0.0031004215124994516,
-0.14669200778007507,
0.02837420254945755,
-0.04881234094500542,
-0.15140455961227417,
0.017144257202744484,
0.09492633491754532,
-0.08249375224113464,
0.08794200420379639,
-0.031052060425281525,
-0.06332913786172867,
0.04844464361667633,
-0.025517042726278305,
-0.06875211745500565,
0.10444799810647964,
-0.04334540292620659,
0.18789231777191162,
0.05945691838860512,
0.07916077226400375,
-0.10878047347068787,
0.03169925883412361,
-0.035703979432582855,
-0.05426683649420738,
0.053875938057899475,
0.013044627383351326,
0.08572325110435486,
0.148219496011734,
0.08826510608196259,
0.013057288713753223,
-0.011335043236613274,
-0.10189681500196457,
-0.08073927462100983,
-0.09356337040662766,
-0.08502495288848877,
0.07600758224725723,
0.05850859731435776,
0.03919997438788414,
0.108199343085289,
-0.14120836555957794,
-0.0024596231523901224,
-0.0563577264547348,
-0.18389666080474854,
-0.011223546229302883,
0.20724771916866302,
-0.0012584052747115493,
0.02441222406923771,
-0.03179527819156647,
0.019978800788521767,
0.07925111800432205,
0.06459250301122665,
0.055418096482753754,
-0.062171824276447296,
-0.023068014532327652,
0.01601383276283741,
-0.09178005158901215,
-0.050062134861946106,
0.3835342228412628,
-0.009006989188492298,
-0.0643727108836174,
0.1336149424314499,
-0.05645696446299553,
0.004390096757560968,
0.02658378891646862,
-0.09196306765079498,
-0.022486276924610138,
0.051159054040908813,
-0.10763270407915115,
-0.05248158052563667,
-0.14043457806110382,
-0.09952765703201294,
-0.08258797228336334,
0.06078416854143143,
0.020567793399095535,
0.031825385987758636,
-0.19227655231952667,
0.0816689059138298,
-0.01556441280990839,
-0.058063484728336334,
0.040776778012514114,
-0.13458232581615448,
0.08728652447462082,
-0.008718814700841904,
-0.027683591470122337,
-0.24571764469146729,
0.109584279358387,
0.09560216963291168,
-0.016800574958324432,
-0.008220270276069641,
-0.0024788095615804195,
0.09978673607110977,
0.0460311658680439,
0.01682254672050476,
0.041013456881046295,
0.02617425099015236,
0.051914576441049576,
-0.07138693332672119,
0.0030173957347869873,
-0.009907347150146961,
-0.150643989443779,
0.034793753176927567,
-0.0059046195819973946,
0.019993018358945847,
0.0118485689163208,
-0.030994806438684464,
0.008867744356393814,
-0.007085123565047979,
0.19052308797836304,
0.030858999118208885,
0.002375435084104538,
-0.05273451283574104,
-0.02779649943113327,
-0.06488456577062607,
0.04350954294204712,
0.04432033374905586,
0.06859635561704636,
0.09093298017978668,
-0.07676271349191666,
-0.0628243163228035,
-0.058467548340559006,
-0.06292925029993057,
0.07548780739307404,
0.0773739293217659,
0.05999203026294708,
-0.2623630166053772,
-0.129365473985672,
0.0006923422333784401,
0.06198953092098236,
-0.025261161848902702,
0.05187227576971054,
-0.035861819982528687,
-0.013781283050775528,
-0.003959859721362591,
-0.021182794123888016,
-0.058287426829338074,
-0.02802773378789425,
0.01817828230559826,
-0.01248712558299303,
0.057585444301366806,
-0.22655551135540009,
0.020967165008187294,
-0.06379964202642441,
0.0633038803935051,
-0.06305567920207977,
-0.04716064780950546,
-0.07109619677066803,
0.10462510585784912,
-0.03613865748047829,
-0.009173492901027203,
-0.20649223029613495,
-0.0033075178507715464,
0.12216924875974655,
0.16260720789432526,
-0.18851816654205322,
-0.0015338992234319448,
0.08527917414903641,
-0.10620708018541336,
-0.13681600987911224,
0.08162885904312134,
-0.060331184417009354,
0.1317870169878006,
0.023432277143001556,
0.11562302708625793,
0.024741847068071365,
-0.17154890298843384,
-0.015779390931129456,
-0.011944251134991646,
-0.04907839745283127,
-0.18690448999404907,
0.06613212823867798,
-0.01144125685095787,
-0.12945862114429474,
0.004386637359857559,
0.024412568658590317,
0.09962955862283707,
-0.020976703613996506,
-0.07581182569265366,
0.012190735898911953,
-0.057921797037124634,
0.027201486751437187,
-0.044561274349689484,
0.1206335499882698,
0.0019984005484730005,
0.047007184475660324,
-0.13129062950611115,
0.0941312238574028,
0.012293508276343346,
0.035547878593206406,
-0.024234946817159653,
0.06308235973119736,
-0.0773068368434906,
0.012614374980330467,
-0.06861226260662079,
-0.007633765693753958,
0.048990894109010696,
0.008468976244330406,
0.05655210465192795,
0.02062864974141121,
0.027229957282543182,
0.016340214759111404,
-0.017076566815376282,
0.004400091245770454,
-0.0958738848567009,
0.025794370099902153,
0.018554922193288803,
-0.0737374946475029,
0.02465922012925148,
-0.07133298367261887,
0.060553837567567825,
-0.14554044604301453,
-0.011264017783105373,
0.04009446129202843,
-0.011007828637957573,
0.04897327348589897,
0.06819149851799011,
-0.021221060305833817,
0.06970269978046417,
-0.08623708784580231,
0.024730239063501358,
-0.01676715724170208,
0.015988629311323166,
-0.018521368503570557,
-0.040987834334373474,
0.08235533535480499,
-0.04549289494752884,
0.0734160840511322,
-0.1496940702199936,
-0.04424989968538284,
-0.03884599730372429,
0.007518948521465063,
-0.020001592114567757,
-0.0329304076731205,
0.005899849347770214,
0.06005614250898361,
-0.030686447396874428,
0.10101690888404846,
-0.05062461271882057,
0.10146472603082657,
0.003055962035432458,
-0.13069716095924377,
-0.08595278859138489,
0.02020718902349472,
0.21755041182041168,
-0.06533517688512802,
0.07450048625469208,
0.1993066668510437,
-0.08539842814207077,
0.11743707209825516,
0.005947588011622429,
-0.03279849514365196,
-0.09043564647436142,
0.017994394525885582,
0.03371833264827728,
0.1312870979309082,
-0.05426925793290138,
0.0030827627051621675,
0.0014944180147722363,
0.053454503417015076,
0.11389493942260742,
-0.09339461475610733,
-0.07151193171739578,
0.012125097215175629,
0.060207050293684006,
-0.06148898974061012,
0.0330691933631897,
-0.13135500252246857,
0.014047935605049133,
-0.03400550037622452,
-0.140297994017601,
0.03899451345205307,
0.0014074168866500258,
-0.03388434648513794,
0.10965122282505035,
-0.14215394854545593,
-0.1656622290611267,
-0.17566649615764618,
-0.062418270856142044,
-0.029161153361201286,
0.04589032754302025,
0.033510755747556686,
-0.10597024857997894,
-0.019319970160722733,
0.0013394012348726392,
-0.059598296880722046,
-0.08519486337900162,
-0.014052879065275192,
-0.015274593606591225,
0.08762267231941223,
-0.028602123260498047,
-0.06173907220363617,
-0.01878274790942669,
-0.07897426187992096,
0.02410334162414074,
0.14386482536792755,
-0.08418936282396317,
0.16006632149219513,
0.11879101395606995,
0.041889794170856476,
0.002597124082967639,
0.0014071851037442684,
0.11677860468626022,
-0.04806305095553398,
-0.0017887194408103824,
0.09847105294466019,
0.0274975523352623,
0.00804413203150034,
0.14779812097549438,
0.037730373442173004,
-0.07041605561971664,
0.04698804393410683,
-0.15120995044708252,
-0.15935198962688446,
-0.1993907243013382,
-0.13631097972393036,
-0.08814693242311478,
-0.013840451836585999,
-0.026691783219575882,
0.04874587431550026,
0.15862475335597992,
0.09376677870750427,
0.09735069423913956,
0.052601221948862076,
-0.04233868047595024,
0.0469936840236187,
0.058578357100486755,
-0.004744809586554766,
-0.0016062306240200996,
-0.061991047114133835,
-0.05357382446527481,
0.09216422587633133,
0.0961051657795906,
0.2678029239177704,
0.11912595480680466,
-0.017726903781294823,
0.10730287432670593,
0.1480681300163269,
0.04777649790048599,
0.09514141082763672,
0.010111469775438309,
0.007044048979878426,
-0.05390283092856407,
-0.045529402792453766,
-0.04039901867508888,
0.1349015086889267,
-0.019344953820109367,
-0.1256125271320343,
0.028508156538009644,
0.05250440537929535,
-0.011374927125871181,
-0.040122825652360916,
0.09000939875841141,
-0.1588802933692932,
0.02930871583521366,
0.030936349183321,
0.12241960316896439,
0.0005467653390951455,
0.050085343420505524,
0.12461412698030472,
-0.01741662621498108,
-0.014399252831935883,
0.020527267828583717,
0.031890664249658585,
-0.012332023121416569,
0.012163101695477962,
-0.07113578170537949,
-0.012213636189699173,
0.006043427158147097,
0.02439901977777481,
-0.2764969766139984,
0.2356313019990921,
-0.022477151826024055,
0.027438674122095108,
0.039636921137571335,
0.020778197795152664,
-0.009049704298377037,
0.1394348442554474,
0.04511642083525658,
0.03692060708999634,
-0.05668344721198082,
-0.03964732587337494,
-0.08811931312084198,
0.006149351596832275,
0.007021099794656038,
0.053151685744524,
0.002152117434889078,
0.03412145748734474,
0.0013245918089523911,
0.04433337599039078,
0.13610288500785828,
-0.16657787561416626,
-0.02453402802348137,
-0.035432472825050354,
0.09238738566637039,
0.02548600360751152,
-0.05780122056603432,
0.03371912240982056,
0.0858711451292038,
-0.07555394619703293,
-0.05012192949652672,
0.02229827456176281,
-0.05164458975195885,
-0.055032018572092056,
0.033370375633239746,
-0.055159792304039,
0.07274796068668365,
0.02635011076927185,
-0.06472879648208618,
-0.0398661270737648,
-0.06710443645715714,
0.05656236782670021,
-0.12186684459447861,
0.005904768127948046,
-0.0745973065495491,
-0.010413911193609238,
0.0572318434715271,
0.03133171424269676,
0.031110765412449837,
-0.004392640665173531,
-0.08503083139657974,
-0.0681157186627388,
0.011821011081337929,
0.05180481821298599,
-0.03379730507731438,
-0.004083461593836546,
-0.024225682020187378,
-0.0773078054189682,
-0.009344098158180714,
-0.04650220647454262,
0.1705223023891449,
0.20241688191890717,
-0.03113723360002041,
0.034315068274736404,
0.2908846139907837,
-0.09136853367090225,
-0.2194771021604538,
-0.10306454449892044,
-0.016725828871130943,
-0.0025046432856470346,
-0.022933779284358025,
-0.21110926568508148,
0.033698685467243195,
0.07547052949666977,
-0.04687202721834183,
0.005207729060202837,
-0.2070770114660263,
-0.06102912500500679,
0.1588248610496521,
-0.04681338742375374,
0.2755255103111267,
-0.03396936506032944,
-0.06916192173957825,
-0.10875048488378525,
-0.06855439394712448,
0.0185566246509552,
-0.034173619002103806,
0.04800155386328697,
-0.006854511797428131,
0.05472536012530327,
-0.016709918156266212,
-0.02449304796755314,
0.13425591588020325,
0.03555193170905113,
0.10142522305250168,
-0.032857246696949005,
-0.09733235836029053,
0.13205240666866302,
-0.0378212071955204,
0.07752031832933426,
0.13238279521465302,
0.026725513860583305,
-0.06115859001874924,
-0.02222926914691925,
-0.03334284946322441,
0.07783369719982147,
0.023223290219902992,
-0.10866016894578934,
-0.08685752749443054,
0.025233130902051926,
-0.009946933947503567,
0.000324695254676044,
0.1485757976770401,
0.0826408714056015,
-0.04193728044629097,
0.18879666924476624,
0.014016550965607166,
-0.05309941992163658,
0.06093451753258705,
0.03641454502940178,
-0.06692798435688019,
0.09194644540548325,
-0.1325255036354065,
-0.024422118440270424,
0.058828774839639664,
-0.016919251531362534,
0.052949611097574234,
0.06573407351970673,
-0.12418335676193237,
0.09763949364423752,
0.1418539136648178,
-0.09555845707654953,
0.01399120595306158,
-0.007321244105696678,
0.11122186481952667,
0.1066417470574379,
0.13025830686092377,
0.12553682923316956,
-0.08609185367822647,
0.04088282212615013,
-0.02782604657113552,
0.013752775266766548,
-0.09478623420000076,
0.07446719706058502,
0.13503892719745636,
-0.04088257625699043,
-0.07235879451036453,
0.15246689319610596,
0.12605103850364685,
0.11783599108457565,
0.019160130992531776,
-0.07252580672502518,
-0.046323083341121674,
-0.07381067425012589,
-0.1601584255695343,
0.06787578761577606,
-0.06141020357608795,
-0.13233810663223267,
-0.023518918082118034,
-0.04816102609038353,
0.00018811346672009677,
-0.04092225804924965,
0.041754286736249924,
0.0019628836307674646,
-0.05469578504562378,
0.03162285313010216,
-0.07236380875110626,
0.012047543190419674,
-0.033074136823415756,
0.0817931666970253,
-0.13843058049678802,
-0.057822730392217636,
-0.026976654306054115,
0.02871638722717762,
-0.05973069742321968,
-0.05876633897423744,
-0.06505105644464493,
0.05990619584918022,
-0.18000592291355133,
-0.05032981187105179,
-0.04664703831076622,
-0.05139349400997162,
0.03462681546807289,
-0.010933159850537777,
-0.07818837463855743,
0.04143941029906273,
-0.09122294187545776,
0.04795914888381958,
-0.0001767954381648451,
0.043619364500045776,
-0.048769496381282806,
-0.04374563321471214,
0.007119377609342337,
-0.021128743886947632,
0.012797782197594643,
0.043061934411525726,
0.028643079102039337,
0.05598842725157738,
-0.10144047439098358,
-0.006283777300268412,
0.1272358000278473,
0.06174647435545921,
0.04399396479129791,
-0.00029269588412716985,
-0.032318826764822006,
0.010357368737459183,
-0.034774914383888245,
-0.014038307592272758,
0.07848036289215088,
-0.05394769832491875,
0.0007111121667549014,
-0.03273410722613335,
-0.10271260142326355,
-0.017100974917411804,
-0.013072468340396881,
0.07690829783678055,
0.10426879674196243,
0.13358275592327118,
-0.0006835737731307745,
-0.0071298908442258835,
-0.04093939810991287,
-0.024260155856609344,
0.0025005906354635954,
-0.0495501384139061,
-0.03254400193691254,
-0.06858930736780167,
-0.0034771605860441923,
-0.029290778562426567,
0.15555340051651,
0.04720991849899292,
-0.1482747197151184,
-0.08197180181741714,
0.09010165184736252,
-0.042448677122592926,
0.015352719463407993,
0.10777786374092102,
0.08513906598091125,
-0.00512450048699975,
-0.15063677728176117,
0.1079309955239296,
0.11168752610683441,
0.14304354786872864,
0.07847495377063751,
0.0297706201672554,
-0.025377493351697922,
0.048542898148298264,
0.11048489809036255,
-0.010216406546533108,
0.025921305641531944,
-0.11609115451574326,
-0.07066164165735245,
0.047605663537979126,
-0.011978833936154842,
0.028669707477092743,
0.07068958133459091,
-0.051437243819236755,
-0.015428225509822369,
0.025857986882328987,
-0.060485076159238815,
-0.0757465660572052,
-0.11411237716674805,
-0.0626596212387085,
-0.1386464536190033,
0.027015769854187965,
-0.07539248466491699,
-0.09176002442836761,
0.14923042058944702,
-0.03905244916677475,
-0.05771174654364586,
0.1111665889620781,
0.0070640952326357365,
-0.06003211811184883,
0.026302171871066093,
-0.016512732952833176,
-0.0637640506029129,
-0.047182392328977585,
-0.030706433579325676,
-0.06250885128974915,
0.02868957258760929,
-0.020358771085739136,
0.03221284970641136,
0.011817552149295807,
-0.0016148778377100825,
0.008926752023398876,
-0.02275705337524414,
-0.07694760710000992,
0.011382296681404114,
-0.06569738686084747,
0.06570972502231598,
0.04715711623430252,
-0.04576989635825157,
0.072159044444561,
0.10854002833366394,
0.001732375007122755,
-0.032630909234285355,
-0.05784178152680397,
0.04858899116516113,
0.04389257729053497,
0.11016342043876648,
-0.030140502378344536,
-0.07016754895448685,
-0.053926412016153336,
0.1293380856513977,
0.2875509262084961,
-0.016252320259809494,
-0.00863371230661869,
0.0020152488723397255,
0.022761527448892593,
-0.07585359364748001,
0.20716452598571777,
0.03907027468085289,
0.19434237480163574,
-0.0064851571805775166,
0.007446396630257368,
-0.08934979140758514,
0.003811142174527049,
-0.04112998768687248,
0.031776491552591324,
0.04648570343852043,
-0.10929343104362488,
-0.08951251208782196,
0.09654662013053894,
-0.12378065288066864,
0.01621411368250847,
-0.013511686585843563,
-0.06900326907634735,
-0.042786646634340286,
-0.010916242375969887,
0.058618031442165375,
0.018083296716213226,
0.07283222675323486,
-0.11889896541833878,
0.023559320718050003,
0.07984945178031921,
0.006296220235526562,
-0.09228865802288055,
-0.0015930699883028865,
0.15333014726638794,
0.004847315140068531,
0.17304609715938568,
0.000033866872399812564,
0.09818053990602493,
0.018846338614821434,
0.02819633297622204,
-0.0131651246920228,
0.1318519115447998,
0.028155015781521797,
0.055640809237957,
-0.025732778012752533,
-0.11417977511882782,
-0.038902297616004944,
0.05665607750415802,
0.06762585043907166,
-0.006903717759996653,
0.0067977337166666985,
0.1293940246105194,
0.006647065747529268,
-0.08039061725139618,
0.037110015749931335,
-0.08276664465665817,
0.08399617671966553,
-0.003054618136957288,
-0.04203157499432564,
-0.003741414984688163,
-0.05705317109823227,
0.05215637758374214,
0.06296184659004211,
0.01932983659207821,
-0.0276905819773674,
-0.06832548975944519,
-0.039093174040317535,
0.0006283282418735325,
0.027616405859589577,
-0.1022920310497284,
-0.021400660276412964,
-0.12437468022108078,
-0.0190158449113369,
0.012315323576331139,
0.05065397918224335,
0.06546173244714737,
0.007118042092770338,
-0.03386801481246948,
0.23972955346107483,
-0.059984203428030014,
0.009655507281422615,
-0.05869225040078163,
-0.07555793970823288
] |
null | null | null |
This is an Audacity wrapper for the model, forked from the repository `JorisCos/ConvTasNet_Libri3Mix_sepnoisy_16k`.
This model was trained using the Asteroid library: https://github.com/asteroid-team/asteroid.
The following info was copied directly from `JorisCos/ConvTasNet_Libri3Mix_sepnoisy_16k`:
Description:
This model was trained by Joris Cosentino using the librimix recipe in [Asteroid](https://github.com/asteroid-team/asteroid).
It was trained on the `sep_noisy` task of the Libri3Mix dataset.
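As an illustrative sketch (not part of the original card), the same checkpoint can typically be loaded outside Audacity with Asteroid's `from_pretrained`; the input filename is a placeholder and the exact API may differ between Asteroid versions:
```python
# Minimal sketch: load the pretrained separator and split a mixture into 3 sources.
# Assumes `pip install asteroid soundfile`; "mixture.wav" is a hypothetical input.
import soundfile as sf
import torch
from asteroid.models import ConvTasNet

# Downloads the pretrained weights from the Hugging Face Hub.
model = ConvTasNet.from_pretrained("JorisCos/ConvTasNet_Libri3Mix_sepnoisy_16k")

# Load a 16 kHz mono mixture (the model was trained with sample_rate: 16000).
mixture, sr = sf.read("mixture.wav", dtype="float32")
assert sr == 16000, "resample the input to 16 kHz first"

with torch.no_grad():
    # Shape (batch, time) in, (batch, n_src=3, time) out.
    est_sources = model(torch.from_numpy(mixture).unsqueeze(0))

for i, src in enumerate(est_sources[0]):
    sf.write(f"speaker{i + 1}.wav", src.numpy(), sr)
```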
Training config:
```yml
data:
n_src: 3
sample_rate: 16000
segment: 3
task: sep_noisy
train_dir: data/wav16k/min/train-360
valid_dir: data/wav16k/min/dev
filterbank:
kernel_size: 32
n_filters: 512
stride: 16
masknet:
bn_chan: 128
hid_chan: 512
mask_act: relu
n_blocks: 8
n_repeats: 3
n_src: 3
skip_chan: 128
optim:
lr: 0.001
optimizer: adam
weight_decay: 0.0
training:
batch_size: 8
early_stop: true
epochs: 200
half_lr: true
num_workers: 4
```
Results:
On the Libri3Mix min test set:
```yml
si_sdr: 5.926151147554517
si_sdr_imp: 10.282912158535625
sdr: 6.700975236867358
sdr_imp: 10.882972447337504
sir: 15.364110064569388
sir_imp: 18.574476587171688
sar: 7.918866830474568
sar_imp: -0.9638973409971135
stoi: 0.7713777027310713
stoi_imp: 0.2078696167973911
```
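For context, `si_sdr` is the scale-invariant signal-to-distortion ratio in dB, and the `*_imp` fields are improvements over the unprocessed mixture. A minimal sketch of the standard SI-SDR computation follows (our own illustration, not Asteroid's implementation):
```python
import numpy as np

def si_sdr(estimate: np.ndarray, reference: np.ndarray) -> float:
    """Scale-invariant SDR in dB (standard definition, zero-mean signals)."""
    estimate = estimate - estimate.mean()
    reference = reference - reference.mean()
    # Project the estimate onto the reference to get the scaled target.
    alpha = np.dot(estimate, reference) / np.dot(reference, reference)
    target = alpha * reference
    noise = estimate - target
    return 10 * np.log10(np.sum(target ** 2) / np.sum(noise ** 2))
```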
License notice:
This work "ConvTasNet_Libri3Mix_sepnoisy_16k" is a derivative of [LibriSpeech ASR corpus](http://www.openslr.org/12) by Vassil Panayotov,
used under [CC BY 4.0](https://creativecommons.org/licenses/by/4.0/); of The WSJ0 Hipster Ambient Mixtures
dataset by [Whisper.ai](http://wham.whisper.ai/), used under [CC BY-NC 4.0](https://creativecommons.org/licenses/by-nc/4.0/).
"ConvTasNet_Libri3Mix_sepnoisy_16k" is licensed under [Attribution-ShareAlike 3.0 Unported](https://creativecommons.org/licenses/by-sa/3.0/) by Joris Cosentino
|
{"tags": ["audacity"], "inference": false}
| null |
hugggof/ConvTasNet_Libri3Mix_sepnoisy_16k
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
This is an Audacity wrapper for the model, forked from the repository 'JorisCos/ConvTasNet_Libri3Mix_sepnoisy_16k'.
This model was trained using the Asteroid library: URL
The following info was copied directly from 'JorisCos/ConvTasNet_Libri3Mix_sepnoisy_16k':
Description:
This model was trained by Joris Cosentino using the librimix recipe in Asteroid.
It was trained on the 'sep_noisy' task of the Libri3Mix dataset.
Training config:
Results:
On the Libri3Mix min test set:
License notice:
This work "ConvTasNet_Libri3Mix_sepnoisy_16k" is a derivative of LibriSpeech ASR corpus by Vassil Panayotov,
used under CC BY 4.0; of The WSJ0 Hipster Ambient Mixtures
dataset by URL, used under CC BY-NC 4.0.
"ConvTasNet_Libri3Mix_sepnoisy_16k" is licensed under Attribution-ShareAlike 3.0 Unported by Joris Cosentino
|
[] |
[
"TAGS\n#audacity #region-us \n"
] |
[
10
] |
[
"passage: TAGS\n#audacity #region-us \n"
] |
[
-0.019914701581001282,
0.05779961124062538,
-0.010926702991127968,
-0.04279547929763794,
0.11446382105350494,
0.09448901563882828,
0.08029607683420181,
-0.022797605022788048,
0.14681506156921387,
-0.0162777379155159,
0.13710515201091766,
-0.07226235419511795,
-0.035463739186525345,
-0.033024512231349945,
0.005356237292289734,
-0.10074841976165771,
0.007150833960622549,
-0.033344678580760956,
0.08124350756406784,
0.02943301387131214,
-0.06437834352254868,
-0.08880753070116043,
0.011320039629936218,
-0.07026758790016174,
0.0007598965312354267,
0.09990371018648148,
0.02207607589662075,
-0.004762526135891676,
0.13075418770313263,
-0.0023671428207308054,
0.21611620485782623,
-0.0013131789164617658,
-0.1079963818192482,
-0.2589193284511566,
0.033793505281209946,
-0.0679452195763588,
-0.048759639263153076,
-0.004788506776094437,
0.032918594777584076,
-0.07716623693704605,
-0.05942558869719505,
0.1867903769016266,
0.027057260274887085,
0.057751528918743134,
-0.2933948040008545,
-0.1713157594203949,
-0.044318027794361115,
-0.11327271908521652,
0.05205332487821579,
0.06488128006458282,
-0.0021672029979526997,
0.12185084074735641,
-0.12281964719295502,
-0.029398392885923386,
-0.06945153325796127,
-0.20017586648464203,
0.039045337587594986,
0.030407333746552467,
0.020608393475413322,
0.18956594169139862,
-0.03880661353468895,
0.0656677708029747,
0.057296812534332275,
-0.004693426191806793,
-0.164091557264328,
-0.09644902497529984,
0.020190980285406113,
0.13890232145786285,
-0.046129148453474045,
-0.10711164772510529,
0.3522171974182129,
0.033933125436306,
-0.0067940689623355865,
0.17791517078876495,
-0.040265437215566635,
-0.05243963375687599,
0.03606535866856575,
-0.01806752011179924,
-0.013442175462841988,
0.13498176634311676,
0.1699664145708084,
0.005228137131780386,
-0.12744294106960297,
0.08720370382070541,
-0.19098959863185883,
0.15892720222473145,
-0.01449578907340765,
0.1160096526145935,
-0.2023066133260727,
-0.06133773550391197,
-0.11251486837863922,
-0.02664046175777912,
0.08569790422916412,
-0.04693326726555824,
-0.015202969312667847,
-0.029618682339787483,
-0.021353373304009438,
-0.037031061947345734,
0.07402480393648148,
0.19581420719623566,
-0.09090454876422882,
0.05096610262989998,
-0.054620373994112015,
0.15048635005950928,
0.07693548500537872,
0.12036288529634476,
0.11039042472839355,
0.011743704788386822,
-0.07727596908807755,
-0.11500803381204605,
0.007380517665296793,
-0.039774972945451736,
-0.07354630529880524,
0.013560472056269646,
-0.07746995985507965,
0.11021328717470169,
-0.02678333781659603,
-0.15352201461791992,
-0.165897935628891,
0.07333701103925705,
-0.04297696426510811,
-0.00013136850611772388,
-0.04918977990746498,
-0.056767988950014114,
0.059058595448732376,
0.11667335033416748,
-0.13396702706813812,
0.040163036435842514,
0.09581905603408813,
0.1339111030101776,
-0.13031519949436188,
-0.033091239631175995,
-0.013801677152514458,
0.05866626650094986,
0.052876412868499756,
-0.03441518172621727,
0.11390253156423569,
-0.12725137174129486,
0.004931151866912842,
-0.022264661267399788,
0.04970357194542885,
0.007532306015491486,
0.11499388515949249,
-0.010113149881362915,
0.05110105872154236,
-0.000953131471760571,
-0.029062580317258835,
-0.17668868601322174,
-0.10328592360019684,
0.04374127835035324,
0.013122226111590862,
0.009676720015704632,
-0.1804816722869873,
0.009606181643903255,
-0.06409380584955215,
0.12774814665317535,
-0.03378288820385933,
-0.08541708439588547,
0.014192468486726284,
0.22506332397460938,
0.05282241106033325,
0.1006854772567749,
-0.1724855601787567,
0.003599646035581827,
-0.019493067637085915,
0.21236157417297363,
-0.08220423012971878,
-0.08138676732778549,
0.10268660634756088,
-0.07840625196695328,
-0.11874464899301529,
0.07724273204803467,
0.038784634321928024,
0.025601524859666824,
0.07102761417627335,
0.29563266038894653,
-0.0731085017323494,
-0.15871800482273102,
0.0686788409948349,
0.13011281192302704,
-0.06023421138525009,
-0.12328873574733734,
0.09222980588674545,
-0.10513880103826523,
-0.17189328372478485,
0.010083271190524101,
0.232631117105484,
0.13491258025169373,
-0.08463052660226822,
-0.06943131983280182,
0.07306202501058578,
-0.012712272815406322,
0.020237427204847336,
0.059156838804483414,
0.037619348615407944,
-0.08038962632417679,
0.03823816403746605,
-0.19657887518405914,
0.004407462663948536,
0.1697572022676468,
0.01639886386692524,
-0.04909393563866615,
0.0716392993927002,
-0.03402747958898544,
0.031193703413009644,
-0.09878302365541458,
-0.07956214994192123,
-0.019728856161236763,
0.12571705877780914,
-0.003956906963139772,
0.1149866133928299,
0.05289863422513008,
-0.11335212737321854,
-0.026068631559610367,
-0.037253446877002716,
0.005051902495324612,
0.012232155539095402,
0.013084834441542625,
-0.023270800709724426,
0.16632267832756042,
-0.08196469396352768,
0.002268345793709159,
-0.055360496044158936,
-0.04146995767951012,
0.11897814273834229,
-0.053218524903059006,
0.09621988981962204,
-0.010789749212563038,
-0.017260460183024406,
0.07674842327833176,
0.045527976006269455,
0.034035854041576385,
0.07907819002866745,
-0.034830160439014435,
-0.0906984955072403,
0.08063743263483047,
-0.06693808734416962,
0.19321605563163757,
0.12368225306272507,
-0.19652535021305084,
-0.023579489439725876,
-0.009180226363241673,
0.005826315376907587,
-0.0018989959498867393,
0.108733631670475,
-0.024737512692809105,
0.012634341605007648,
0.04265842214226723,
0.01845341920852661,
0.04411933198571205,
0.06479719281196594,
-0.04094400256872177,
-0.028917506337165833,
-0.1058577373623848,
0.08972824364900589,
0.0709294006228447,
-0.01802927441895008,
0.0899100974202156,
0.5066919922828674,
0.09791268408298492,
0.15231642127037048,
-0.09199150651693344,
-0.02377757988870144,
0.004111617337912321,
-0.020663609728217125,
-0.009413091465830803,
0.16156020760536194,
-0.10640115290880203,
0.04137662798166275,
0.00969754345715046,
0.017358323559165,
0.03780204802751541,
-0.12463916838169098,
-0.112360380589962,
-0.030478879809379578,
0.06677917391061783,
-0.03942357376217842,
0.013831750489771366,
-0.07615257054567337,
0.04158969596028328,
0.09013422578573227,
-0.07165062427520752,
0.10189533233642578,
-0.024082178249955177,
-0.02381778694689274,
0.06311161816120148,
-0.17908406257629395,
-0.19444851577281952,
-0.06752840429544449,
-0.05548268184065819,
0.040680937469005585,
0.024625340476632118,
-0.005136317107826471,
-0.13864435255527496,
-0.02908422239124775,
0.08805709332227707,
0.05729407072067261,
-0.15221236646175385,
0.010241538286209106,
0.018007883802056313,
0.08447563648223877,
-0.05700908228754997,
0.04972472041845322,
-0.02590595744550228,
-0.09138061106204987,
0.002512522740289569,
0.08773497492074966,
-0.10353470593690872,
0.1390758603811264,
0.1917080581188202,
0.10454132407903671,
0.025493783876299858,
0.010675451718270779,
0.18258953094482422,
-0.15105485916137695,
-0.1304367035627365,
0.07714436948299408,
-0.11734716594219208,
0.044962577521800995,
0.2233843058347702,
0.031684380024671555,
-0.10463154315948486,
-0.003469983348622918,
-0.04676547273993492,
-0.12693078815937042,
-0.18226036429405212,
-0.060958318412303925,
-0.09744668006896973,
0.20076002180576324,
0.0012977890437468886,
0.056443847715854645,
-0.05492844432592392,
0.002865558722987771,
0.12135601043701172,
-0.08794277161359787,
-0.08902571350336075,
-0.03130508214235306,
0.23185576498508453,
-0.07907517999410629,
-0.016707219183444977,
-0.0813768282532692,
-0.06817937642335892,
0.10927245765924454,
0.1784239113330841,
0.042485110461711884,
0.25141242146492004,
-0.01529120746999979,
0.05956966057419777,
0.032707493752241135,
0.11268060654401779,
0.03830089792609215,
0.056737031787633896,
-0.05229474976658821,
-0.036017950624227524,
0.033086199313402176,
-0.03215155377984047,
0.06188979372382164,
0.09052561223506927,
-0.22288063168525696,
-0.00018355542852077633,
-0.1874006688594818,
0.07674342393875122,
-0.13645312190055847,
0.16382482647895813,
0.04487433284521103,
0.07254045456647873,
0.11432640254497528,
-0.006908687297254801,
-0.048144567757844925,
0.16749441623687744,
0.041044630110263824,
-0.08392536640167236,
0.007957840338349342,
0.040108852088451385,
0.09385466575622559,
-0.05542761832475662,
0.10823865979909897,
-0.10375513881444931,
-0.16861197352409363,
0.015666499733924866,
0.01980459690093994,
-0.19648192822933197,
0.25793322920799255,
-0.004483161959797144,
-0.14627836644649506,
0.03969166427850723,
-0.08022485673427582,
0.016199585050344467,
0.1742914468050003,
0.07198935747146606,
0.06765776872634888,
-0.09778467565774918,
-0.07762656360864639,
0.06099579483270645,
0.028531787917017937,
0.1277051717042923,
0.02034187503159046,
-0.11804807186126709,
-0.0028785166796296835,
0.05894226208329201,
-0.04242273047566414,
0.11712394654750824,
-0.06841699779033661,
-0.09328094869852066,
-0.006683433894068003,
0.09414009749889374,
-0.03853561729192734,
-0.0058347852900624275,
0.04749997332692146,
0.002058602636680007,
-0.10418712347745895,
0.07030697166919708,
0.024848802015185356,
-0.06271625310182571,
-0.1682130992412567,
0.07563578337430954,
-0.04838734492659569,
0.004263483453541994,
-0.09165068715810776,
-0.15208227932453156,
-0.10194677114486694,
-0.05322356894612312,
0.11349347978830338,
-0.036217570304870605,
0.10298572480678558,
-0.08526270091533661,
0.0755581259727478,
-0.053325023502111435,
0.045333556830883026,
-0.06773395091295242,
0.06607968360185623,
-0.0479123480618,
-0.07695187628269196,
0.13829563558101654,
-0.2894185483455658,
-0.055552780628204346,
0.14189426600933075,
0.016879206523299217,
-0.006479946430772543,
0.018798980861902237,
-0.11741306632757187,
0.19689679145812988,
0.35578593611717224,
0.03385046496987343,
0.13395942747592926,
0.2187308818101883,
-0.06228160485625267,
-0.30797863006591797,
-0.07810231298208237,
-0.2287967950105667,
-0.055851470679044724,
0.1369660198688507,
-0.22752989828586578,
0.07691747695207596,
0.05305289477109909,
-0.08654773980379105,
0.30219292640686035,
-0.17917947471141815,
-0.016062403097748756,
0.17774532735347748,
-0.04481431469321251,
0.5816458463668823,
-0.1430119127035141,
-0.09836291521787643,
-0.04522782191634178,
-0.04539519175887108,
0.03978416696190834,
0.03879636898636818,
0.04079165682196617,
-0.006589636206626892,
0.02758682891726494,
0.03617465868592262,
0.01859908364713192,
0.18490567803382874,
0.020886186510324478,
0.03680313378572464,
-0.041644684970378876,
-0.13934871554374695,
0.10013005882501602,
0.004688136279582977,
-0.16385233402252197,
0.03643302246928215,
-0.057321518659591675,
-0.15089042484760284,
0.034729450941085815,
-0.0546313039958477,
-0.015037068165838718,
0.05461987853050232,
-0.05651901289820671,
-0.0633925274014473,
0.033461444079875946,
-0.12169618904590607,
-0.013170113787055016,
0.24228917062282562,
-0.0388687439262867,
0.1350758671760559,
-0.034903425723314285,
-0.008200401440262794,
-0.15104445815086365,
0.12760908901691437,
-0.02976180799305439,
-0.02488657645881176,
0.07498238980770111,
-0.10600040853023529,
0.04614400118589401,
0.13278955221176147,
-0.0745481625199318,
0.06773654371500015,
0.07300273329019547,
-0.05185369774699211,
0.0395653061568737,
0.1801140308380127,
-0.17189538478851318,
-0.09801355749368668,
-0.040786437690258026,
-0.023585941642522812,
0.12948420643806458,
-0.039507605135440826,
0.06705755740404129,
0.15279722213745117,
0.03767872974276543,
0.0034349565394222736,
-0.02973589114844799,
-0.10049909353256226,
-0.055739518254995346,
0.07789833098649979,
-0.011081283912062645,
-0.01755240373313427,
0.13556931912899017,
0.0766322985291481,
-0.18439364433288574,
-0.07496325671672821,
0.17362961173057556,
-0.023611171171069145,
-0.047512587159872055,
-0.2628113627433777,
0.1377924531698227,
-0.1507900506258011,
-0.052750129252672195,
0.012320579029619694,
-0.06474464386701584,
0.009235239587724209,
0.07726151496171951,
0.020442979410290718,
0.1039549708366394,
0.04586785286664963,
-0.04472675174474716,
0.14837181568145752,
-0.037746720016002655,
-0.12706628441810608,
-0.06064179912209511,
-0.06727295368909836,
-0.294737845659256,
-0.005672522354871035,
0.1548699289560318,
-0.08197496831417084,
-0.12423773854970932,
-0.24520590901374817,
0.12861491739749908,
-0.21700723469257355,
-0.09412039816379547,
-0.012572778388857841,
-0.06106030195951462,
0.0524301715195179,
-0.04540259391069412,
-0.04581892862915993,
-0.08922511339187622,
-0.1523037999868393,
0.047759898006916046,
0.08909787982702255,
0.045745931565761566,
0.04709434509277344,
-0.03490961343050003,
0.16961872577667236,
0.004899878520518541,
0.13295607268810272,
0.055445946753025055,
-0.00479267118498683,
0.16752395033836365,
-0.14760875701904297,
-0.05735689401626587,
0.07530035823583603,
-0.023682797327637672,
0.0017639723373576999,
0.16693679988384247,
-0.07734557241201401,
-0.000007220610314107034,
-0.03501299396157265,
0.06335780024528503,
-0.08348127454519272,
-0.06540147960186005,
-0.057089708745479584,
0.07074878364801407,
-0.25006598234176636,
0.025473104789853096,
-0.16761502623558044,
0.08602650463581085,
0.007795935962349176,
0.07143428176641464,
0.06677111238241196,
0.08822675794363022,
0.06124147027730942,
0.007646333891898394,
0.057723771780729294,
-0.12122171372175217,
-0.022483740001916885,
-0.07886261492967606,
-0.033710844814777374,
0.023804524913430214,
0.30653688311576843,
-0.07920853793621063,
-0.014961563050746918,
0.05315486714243889,
0.11362864077091217,
-0.06990054994821548,
0.013614371418952942,
0.092718206346035,
0.14012984931468964,
-0.0909809097647667,
-0.16764149069786072,
0.02885504998266697,
-0.017832137644290924,
-0.06434269994497299,
0.06304419785737991,
0.06157404184341431,
0.10324247181415558,
0.053672704845666885,
-0.0491168387234211,
0.043900877237319946,
0.10597001016139984,
-0.1515667885541916,
0.01875554956495762,
0.028657129034399986,
0.023451080545783043,
0.05253911390900612,
0.14387473464012146,
0.000058224024542141706,
0.039346564561128616,
-0.15546248853206635,
0.03540085256099701,
-0.10178782045841217,
0.07718528807163239,
0.03077881969511509,
-0.10134410858154297,
0.062202394008636475,
-0.014101392589509487,
-0.055623333901166916,
0.22002260386943817,
-0.0002508058096282184,
-0.02086701989173889,
0.09431373327970505,
-0.005971777718514204,
-0.08670125156641006,
0.014523470774292946,
-0.015617771074175835,
0.07089854031801224,
-0.03289302811026573,
-0.06309652328491211,
-0.12571631371974945,
-0.07492214441299438,
-0.10473018884658813,
0.028465088456869125,
-0.12114037573337555,
-0.06409977376461029,
-0.18488146364688873,
-0.0567828044295311,
-0.0386798121035099,
0.11850044876337051,
-0.07272458076477051,
0.061199087649583817,
-0.026673195883631706,
0.05379723384976387,
0.02941185235977173,
0.14421489834785461,
0.031015651300549507,
0.03114180639386177,
-0.0492279939353466,
0.06219583749771118,
-0.07069549709558487,
0.12538745999336243,
-0.13330423831939697,
-0.003643192583695054,
-0.015901105478405952,
0.19567430019378662,
0.1948401927947998,
-0.18277131021022797,
-0.02313648723065853,
0.02438260056078434,
0.05870823189616203,
0.06896509230136871,
0.11868225038051605,
0.027844084426760674,
0.18441569805145264,
-0.08691275864839554,
-0.001120557775720954,
-0.03341221436858177,
0.030974136665463448,
-0.003978518303483725,
0.05126040428876877,
0.09393976628780365,
0.020366651937365532,
-0.14858315885066986,
0.15922841429710388,
-0.21365763247013092,
0.16002105176448822,
0.11827868968248367,
-0.23527050018310547,
-0.043867696076631546,
-0.0864512100815773,
0.1468331664800644,
-0.04737457260489464,
0.12037485837936401,
-0.07547707110643387,
-0.17954730987548828,
-0.21946834027767181,
0.016171986237168312,
-0.2007199376821518,
-0.1921381801366806,
0.09607909619808197,
0.0865628719329834,
0.0713336318731308,
-0.030302181839942932,
-0.06979642063379288,
0.025887304916977882,
0.0752004012465477,
0.006023000925779343,
0.02530483342707157,
0.055240485817193985,
0.057483311742544174,
-0.24160082638263702,
-0.04059159755706787,
0.03847852349281311,
-0.048469554632902145,
0.08004598319530487,
0.04327286407351494,
-0.004299846012145281,
0.004393884912133217,
-0.05251950025558472,
0.08323945105075836,
0.06287918239831924,
-0.13486455380916595,
0.04133152216672897,
0.00035113628837279975,
0.05223225802183151,
-0.017445862293243408,
0.015243667177855968,
-0.06019064411520958,
0.03403530269861221,
-0.113582544028759,
-0.11144260317087173,
0.14682872593402863,
-0.03460019454360008,
0.21851785480976105,
-0.05222781375050545,
-0.13789592683315277,
0.05361998453736305,
-0.06828869879245758,
0.19617073237895966,
-0.09600894153118134,
0.06534767895936966,
0.0801374763250351,
-0.0034332021605223417,
0.013789433054625988,
-0.25245577096939087,
0.10257025063037872,
-0.02069946750998497,
0.03494172915816307,
-0.06150796636939049
] |
null | null | null |
This is an Audacity wrapper for the model, forked from the repository `mpariente/ConvTasNet_WHAM_sepclean`.
This model was trained using the Asteroid library: https://github.com/asteroid-team/asteroid.
The following info was copied from `mpariente/ConvTasNet_WHAM_sepclean`:
### Description:
This model was trained by Manuel Pariente
using the wham/ConvTasNet recipe in [Asteroid](https://github.com/asteroid-team/asteroid).
It was trained on the `sep_clean` task of the WHAM! dataset.
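As a hedged sketch (the helper name and keyword are assumptions, not guaranteed by this card), recent Asteroid releases expose a file-level `separate` method on pretrained models, which avoids manual tensor handling:
```python
# Sketch of the higher-level file interface; check your Asteroid version's docs,
# as the `separate` helper and its arguments have varied across releases.
from asteroid.models import BaseModel

model = BaseModel.from_pretrained("mpariente/ConvTasNet_WHAM_sepclean")

# Reads the mixture from disk and writes one wav per estimated source
# (e.g. mixture_est1.wav, mixture_est2.wav) next to the input file.
# `resample=True` converts the input to the model's 8 kHz rate if needed.
model.separate("mixture.wav", resample=True)
```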
### Training config:
```yaml
data:
n_src: 2
mode: min
nondefault_nsrc: None
sample_rate: 8000
segment: 3
task: sep_clean
train_dir: data/wav8k/min/tr/
valid_dir: data/wav8k/min/cv/
filterbank:
kernel_size: 16
n_filters: 512
stride: 8
main_args:
exp_dir: exp/wham
gpus: -1
help: None
masknet:
bn_chan: 128
hid_chan: 512
mask_act: relu
n_blocks: 8
n_repeats: 3
n_src: 2
skip_chan: 128
optim:
lr: 0.001
optimizer: adam
weight_decay: 0.0
positional arguments:
training:
batch_size: 24
early_stop: True
epochs: 200
half_lr: True
num_workers: 4
```
### Results:
```yaml
si_sdr: 16.21326632846293
si_sdr_imp: 16.21441705664987
sdr: 16.615180021738933
sdr_imp: 16.464137807433435
sir: 26.860503975131923
sir_imp: 26.709461760826414
sar: 17.18312813480803
sar_imp: -131.99332048277296
stoi: 0.9619940905157323
stoi_imp: 0.2239480672473015
```
### License notice:
This work "ConvTasNet_WHAM!_sepclean" is a derivative of [CSR-I (WSJ0) Complete](https://catalog.ldc.upenn.edu/LDC93S6A)
by [LDC](https://www.ldc.upenn.edu/), used under [LDC User Agreement for
Non-Members](https://catalog.ldc.upenn.edu/license/ldc-non-members-agreement.pdf) (Research only).
"ConvTasNet_WHAM!_sepclean" is licensed under [Attribution-ShareAlike 3.0 Unported](https://creativecommons.org/licenses/by-sa/3.0/)
by Manuel Pariente.
|
{"tags": ["audacity"], "inference": false}
| null |
hugggof/ConvTasNet_WHAM_sepclean
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
This is an Audacity wrapper for the model, forked from the repository 'mpariente/ConvTasNet_WHAM_sepclean'.
This model was trained using the Asteroid library: URL
The following info was copied from 'mpariente/ConvTasNet_WHAM_sepclean':
### Description:
This model was trained by Manuel Pariente
using the wham/ConvTasNet recipe in Asteroid.
It was trained on the 'sep_clean' task of the WHAM! dataset.
### Training config:
### Results:
### License notice:
This work "ConvTasNet_WHAM!_sepclean" is a derivative of CSR-I (WSJ0) Complete
by LDC, used under LDC User Agreement for
Non-Members (Research only).
"ConvTasNet_WHAM!_sepclean" is licensed under Attribution-ShareAlike 3.0 Unported
by Manuel Pariente.
|
[
"### Description:\nThis model was trained by Manuel Pariente \nusing the wham/ConvTasNet recipe in Asteroid.\nIt was trained on the 'sep_clean' task of the WHAM! dataset.",
"### Training config:",
"### Results:",
"### License notice:\nThis work \"ConvTasNet_WHAM!_sepclean\" is a derivative of CSR-I (WSJ0) Complete\nby LDC, used under LDC User Agreement for \nNon-Members (Research only). \n\"ConvTasNet_WHAM!_sepclean\" is licensed under Attribution-ShareAlike 3.0 Unported\nby Manuel Pariente."
] |
[
"TAGS\n#audacity #region-us \n",
"### Description:\nThis model was trained by Manuel Pariente \nusing the wham/ConvTasNet recipe in Asteroid.\nIt was trained on the 'sep_clean' task of the WHAM! dataset.",
"### Training config:",
"### Results:",
"### License notice:\nThis work \"ConvTasNet_WHAM!_sepclean\" is a derivative of CSR-I (WSJ0) Complete\nby LDC, used under LDC User Agreement for \nNon-Members (Research only). \n\"ConvTasNet_WHAM!_sepclean\" is licensed under Attribution-ShareAlike 3.0 Unported\nby Manuel Pariente."
] |
[
10,
49,
6,
4,
85
] |
[
"passage: TAGS\n#audacity #region-us \n### Description:\nThis model was trained by Manuel Pariente \nusing the wham/ConvTasNet recipe in Asteroid.\nIt was trained on the 'sep_clean' task of the WHAM! dataset.### Training config:### Results:### License notice:\nThis work \"ConvTasNet_WHAM!_sepclean\" is a derivative of CSR-I (WSJ0) Complete\nby LDC, used under LDC User Agreement for \nNon-Members (Research only). \n\"ConvTasNet_WHAM!_sepclean\" is licensed under Attribution-ShareAlike 3.0 Unported\nby Manuel Pariente."
] |
[
-0.028535686433315277,
0.04354986548423767,
-0.0022126073017716408,
0.05914982035756111,
0.05470456928014755,
-0.014612616039812565,
0.0833859071135521,
-0.03743501380085945,
-0.04361344128847122,
-0.0453924685716629,
0.07727338373661041,
0.03065570443868637,
-0.012689745053648949,
-0.004231524653732777,
-0.007826106622815132,
-0.0856328085064888,
0.05369402840733528,
-0.06387270241975784,
-0.08888506889343262,
0.02162902057170868,
0.008600348606705666,
-0.09807249158620834,
0.0642460286617279,
-0.010655258782207966,
-0.08046283572912216,
0.033288970589637756,
-0.012587372213602066,
-0.09757792949676514,
0.10622967034578323,
-0.037286996841430664,
0.20157065987586975,
0.11256682872772217,
0.06670292466878891,
-0.11351408064365387,
0.022612228989601135,
-0.10139311850070953,
-0.0497891828417778,
0.04789964482188225,
0.005780242849141359,
0.08093800395727158,
0.1318502128124237,
0.0988389253616333,
-0.00960101280361414,
0.01437082327902317,
-0.15925879776477814,
0.01579897105693817,
-0.13400398194789886,
0.012150187976658344,
0.067515529692173,
0.02883654646575451,
0.04197552427649498,
0.08661139011383057,
-0.14391078054904938,
-0.0226316899061203,
-0.04325438290834427,
-0.2914634346961975,
0.012288026511669159,
0.12870945036411285,
-0.06273030489683151,
-0.014157482422888279,
0.02660367451608181,
0.02234046906232834,
0.12658601999282837,
0.006913141347467899,
0.035668835043907166,
-0.0913025364279747,
-0.10129458457231522,
0.041961442679166794,
-0.08429088443517685,
-0.10003875941038132,
0.34591588377952576,
0.02629004791378975,
-0.08806581050157547,
0.12934161722660065,
-0.058760348707437515,
-0.044075485318899155,
0.023863060399889946,
0.017975499853491783,
-0.002106265164911747,
0.025751736015081406,
-0.004446829669177532,
-0.012546218000352383,
-0.12834355235099792,
-0.0940704271197319,
-0.04271789267659187,
0.1913672238588333,
-0.009941433556377888,
0.037035368382930756,
-0.1052059531211853,
0.12586809694766998,
-0.02491786889731884,
-0.09068455547094345,
0.02433590777218342,
-0.11975663900375366,
0.0915602296590805,
-0.008735760115087032,
-0.025038432329893112,
-0.15888698399066925,
0.1104206070303917,
0.026079455390572548,
-0.02314203791320324,
-0.013144860975444317,
0.06807781010866165,
0.10009898245334625,
0.07799132913351059,
0.021181251853704453,
0.07872080057859421,
0.002344086766242981,
0.048641953617334366,
0.008119916543364525,
0.019780075177550316,
0.016355562955141068,
-0.10828462243080139,
-0.04849689453840256,
-0.02973182685673237,
0.009234747849404812,
0.0012875553220510483,
-0.02479909546673298,
0.015325237065553665,
-0.0003837970725726336,
0.2517165541648865,
-0.012728201225399971,
-0.04612936079502106,
-0.02167738601565361,
-0.04986567422747612,
-0.04229568690061569,
0.07494889199733734,
0.045982662588357925,
0.09331765025854111,
0.015358216129243374,
-0.07591031491756439,
-0.016138669103384018,
-0.09511719644069672,
-0.0395624004304409,
0.029532497748732567,
0.1053515300154686,
0.01751112751662731,
-0.2088826596736908,
-0.14293891191482544,
-0.06237925961613655,
0.01991790346801281,
0.055610913783311844,
-0.03730430454015732,
-0.04227215051651001,
-0.010593234561383724,
-0.013609298504889011,
-0.029049154371023178,
-0.13293717801570892,
-0.04868580028414726,
0.051671918481588364,
0.009207839146256447,
0.04584106430411339,
-0.21723312139511108,
0.00921799335628748,
-0.13002368807792664,
0.10549955070018768,
-0.13248637318611145,
-0.05088840425014496,
-0.062050167471170425,
0.16062718629837036,
-0.01069286372512579,
-0.018216179683804512,
-0.13681064546108246,
-0.0036667378153651953,
0.06349296867847443,
0.19611378014087677,
-0.18201947212219238,
0.008050384931266308,
0.09282945096492767,
-0.0921408161520958,
-0.13394638895988464,
0.026873566210269928,
-0.06321696192026138,
0.12527859210968018,
0.019341813400387764,
0.12550801038742065,
0.06122376769781113,
0.04056267440319061,
-0.0543501153588295,
-0.10085432231426239,
0.009676419198513031,
-0.15568310022354126,
0.06342130899429321,
-0.05708322301506996,
-0.18318532407283783,
-0.01360008679330349,
-0.037208836525678635,
0.06858986616134644,
0.007855353876948357,
-0.10659608244895935,
-0.0210903137922287,
-0.09615859389305115,
0.057045187801122665,
-0.07553108781576157,
0.08989450335502625,
-0.021291373297572136,
0.07298344373703003,
0.007968306541442871,
0.1151764988899231,
-0.03755846247076988,
0.03365204483270645,
-0.023959297686815262,
0.10126291960477829,
-0.07603275030851364,
-0.037621378898620605,
-0.07467070966959,
0.005412573926150799,
0.01978399232029915,
0.11743783205747604,
0.10466968268156052,
0.04870910197496414,
0.023448152467608452,
0.06295392662286758,
0.03964174911379814,
-0.07510210573673248,
-0.12704382836818695,
0.05229676142334938,
0.05234280973672867,
-0.06105044111609459,
-0.02732851542532444,
-0.07615231722593307,
0.11288631707429886,
-0.16629131138324738,
0.017461398616433144,
-0.01926528476178646,
-0.09607934206724167,
0.04128078371286392,
0.04563160985708237,
0.034842293709516525,
0.09763564169406891,
-0.07476874440908432,
0.046686697751283646,
0.015483351424336433,
-0.002248851815238595,
-0.09414751827716827,
-0.09366856515407562,
-0.04097525775432587,
-0.010164416395127773,
0.11575428396463394,
-0.19452902674674988,
-0.058247197419404984,
0.005635246634483337,
-0.014064042828977108,
-0.005261112004518509,
-0.02505546621978283,
0.05449135601520538,
0.09613348543643951,
-0.057410553097724915,
0.09307079762220383,
-0.017213836312294006,
0.13692452013492584,
-0.04556526616215706,
-0.13005156815052032,
-0.0489511601626873,
0.009057578630745411,
0.20172405242919922,
-0.005089568439871073,
0.043810613453388214,
0.2508389949798584,
-0.10249853134155273,
0.07326346635818481,
-0.019049623981118202,
-0.020433491095900536,
-0.0222493764013052,
0.037726860493421555,
0.020597925409674644,
0.13660137355327606,
-0.04894259199500084,
-0.00016204657731577754,
0.011136556975543499,
-0.04126643389463425,
0.07064211368560791,
-0.15671105682849884,
-0.1053914725780487,
0.056990545243024826,
0.021758131682872772,
-0.08986496180295944,
-0.034657008945941925,
-0.1314939558506012,
0.022945966571569443,
-0.021761493757367134,
-0.18093405663967133,
-0.0032626716420054436,
0.01895017921924591,
-0.04469158500432968,
0.10838816314935684,
-0.04551176726818085,
0.006041509564965963,
-0.1749698966741562,
0.008108973503112793,
0.006533705163747072,
0.08170520514249802,
0.04374881833791733,
-0.07222454249858856,
-0.038487426936626434,
-0.05347912013530731,
-0.0790998786687851,
-0.016831645742058754,
-0.026006057858467102,
0.03634614497423172,
0.03336447477340698,
-0.0310505460947752,
-0.037926118820905685,
-0.03374061360955238,
-0.026326019316911697,
-0.03238384798169136,
0.11689269542694092,
-0.050735652446746826,
0.2195848971605301,
0.2115897834300995,
0.004943582694977522,
-0.019483657553792,
-0.0012178143952041864,
0.15565519034862518,
-0.03506752476096153,
-0.039119504392147064,
0.1834738403558731,
-0.0271717868745327,
-0.0020568568725138903,
0.11159493029117584,
0.09154163300991058,
-0.03517748787999153,
0.053548235446214676,
-0.16393733024597168,
-0.14359597861766815,
-0.17346593737602234,
-0.10531844198703766,
-0.005352567881345749,
0.020106801763176918,
-0.048009369522333145,
0.028164027258753777,
0.13514988124370575,
0.15547005832195282,
0.06050141900777817,
0.058487989008426666,
-0.07623205333948135,
0.03954414278268814,
0.09145909547805786,
0.019456611946225166,
0.017412198707461357,
-0.09247229993343353,
-0.07576138526201248,
0.045644063502550125,
0.05539780855178833,
0.1450820118188858,
0.1663421392440796,
0.029446838423609734,
0.07887855917215347,
0.08288763463497162,
0.04148261994123459,
0.0705748125910759,
0.019063927233219147,
0.007222681771963835,
-0.0446372851729393,
-0.09359889477491379,
-0.10843785107135773,
0.117416150867939,
-0.027290279045701027,
-0.053545933216810226,
-0.04476284608244896,
0.008041427470743656,
0.029960716143250465,
-0.017724918201565742,
0.07420873641967773,
-0.1620163917541504,
-0.017639772966504097,
0.09992586821317673,
0.1184578537940979,
0.047411274164915085,
0.04400220513343811,
0.05074864625930786,
-0.016290953382849693,
-0.01838327758014202,
0.06659701466560364,
0.042558152228593826,
-0.053023602813482285,
0.03162693977355957,
-0.06468226760625839,
-0.10026337951421738,
-0.009996926411986351,
-0.010252832435071468,
-0.12907172739505768,
0.1776675283908844,
-0.022240711376070976,
-0.032700128853321075,
0.083132304251194,
-0.008272780105471611,
-0.0028091217391192913,
0.23002730309963226,
0.05916691944003105,
-0.024022389203310013,
-0.007421916350722313,
-0.05205133557319641,
-0.11211482435464859,
0.025130130350589752,
-0.05906950309872627,
-0.027592040598392487,
0.0929107815027237,
0.04754962772130966,
0.030660880729556084,
0.020930152386426926,
0.15838146209716797,
-0.21235474944114685,
0.03341042995452881,
-0.05533559247851372,
0.014120043255388737,
0.10729184001684189,
-0.0541113018989563,
-0.014119839295744896,
-0.1662239134311676,
0.038969144225120544,
-0.022336682304739952,
-0.01894078217446804,
-0.044316742569208145,
-0.12183783948421478,
0.02179485373198986,
-0.023616980761289597,
0.06450098752975464,
0.04921797290444374,
-0.1690230518579483,
-0.030310219153761864,
-0.08488374203443527,
0.07016010582447052,
-0.040291547775268555,
-0.018149973824620247,
-0.03247829154133797,
-0.0031118434853851795,
0.01032361388206482,
0.10433738678693771,
0.01734290085732937,
0.010483141988515854,
-0.0326155461370945,
-0.11212974041700363,
-0.03972271457314491,
0.0738702118396759,
0.02487519569694996,
0.02139383926987648,
-0.022991567850112915,
-0.13148050010204315,
-0.022672146558761597,
-0.007933706976473331,
0.19373415410518646,
0.24793994426727295,
-0.058128856122493744,
0.010182271711528301,
0.10737583786249161,
-0.05426487326622009,
-0.17462079226970673,
-0.048003409057855606,
-0.09848224371671677,
0.026878220960497856,
0.033639565110206604,
-0.1961280107498169,
0.031235501170158386,
0.10479173064231873,
-0.0598042830824852,
0.02731054648756981,
-0.3289063572883606,
-0.05538851395249367,
0.12587633728981018,
0.06712834537029266,
0.2947027385234833,
-0.10384280234575272,
-0.066923126578331,
-0.08495642989873886,
-0.1632157564163208,
0.07485200464725494,
-0.09556134790182114,
0.04024558886885643,
-0.03299926593899727,
0.08738895505666733,
0.022282950580120087,
-0.040203530341386795,
0.1748700588941574,
0.030409526079893112,
0.10452799499034882,
-0.03685365989804268,
-0.10082363337278366,
0.11215338110923767,
-0.019803930073976517,
0.10886169970035553,
0.13030710816383362,
0.005608216859400272,
-0.14963583648204803,
-0.009627840481698513,
-0.03858990967273712,
0.07420794665813446,
-0.021956492215394974,
-0.08832836896181107,
-0.04077301546931267,
-0.01564440317451954,
-0.015967553481459618,
0.05016006529331207,
0.21361155807971954,
0.020030830055475235,
-0.02352551557123661,
0.14891210198402405,
0.10287504643201828,
0.05594347044825554,
0.0877954512834549,
0.037690166383981705,
-0.04610631242394447,
0.057029157876968384,
-0.21153207123279572,
-0.04326876997947693,
0.0493856780230999,
0.03002075105905533,
0.02737601101398468,
0.0796632319688797,
-0.07444792985916138,
0.09500306844711304,
0.13285018503665924,
-0.2088470757007599,
-0.014690089970827103,
-0.008299828507006168,
0.07060515880584717,
0.08992696553468704,
0.10987699031829834,
0.09092980623245239,
-0.14643239974975586,
0.022235123440623283,
-0.004441533703356981,
-0.07275965064764023,
-0.09144353866577148,
0.03787277638912201,
0.19788996875286102,
-0.04250754788517952,
-0.046006638556718826,
0.07488536089658737,
0.12872345745563507,
0.06542641669511795,
-0.026276130229234695,
-0.09123121201992035,
-0.008317919448018074,
-0.105471171438694,
-0.12129387259483337,
0.04658415541052818,
-0.18442955613136292,
-0.13859598338603973,
-0.08844044059515,
-0.06147787719964981,
-0.024931486696004868,
0.061198506504297256,
0.0984153151512146,
0.026296932250261307,
-0.019706306979060173,
0.0199956763535738,
0.018887342885136604,
-0.03769877925515175,
-0.10070051997900009,
0.11611524969339371,
-0.139809712767601,
0.09656790643930435,
-0.06935182958841324,
0.040935590863227844,
-0.04425640031695366,
-0.04225096106529236,
-0.038050659000873566,
0.06311065703630447,
-0.1688886135816574,
-0.03811614215373993,
-0.09895165264606476,
-0.01643005572259426,
0.001590158324688673,
0.057526908814907074,
-0.02697405032813549,
0.02096696011722088,
-0.08597125858068466,
0.0520717017352581,
0.0020779240876436234,
0.03420976549386978,
-0.059033602476119995,
0.02698967233300209,
0.03140581026673317,
-0.029369480907917023,
0.00892416387796402,
-0.015255208127200603,
0.03139297291636467,
0.046772170811891556,
-0.045403577387332916,
0.0286637581884861,
0.10043143481016159,
0.03300842270255089,
0.06266307830810547,
0.05550511181354523,
-0.043087247759103775,
-0.04181094095110893,
-0.07288980484008789,
-0.015275634825229645,
0.150539368391037,
-0.03400561213493347,
-0.04479123279452324,
0.09480290859937668,
-0.07703623175621033,
-0.005064740777015686,
-0.02731912024319172,
0.08190061151981354,
0.05110333114862442,
0.13593409955501556,
0.01768188551068306,
-0.03104228898882866,
-0.08674228936433792,
-0.02574653923511505,
0.01257680170238018,
-0.0257803276181221,
-0.11020484566688538,
-0.034576017409563065,
0.0036025159060955048,
0.0013711251085624099,
0.08337587118148804,
0.00135800801217556,
-0.04883548244833946,
-0.08732111752033234,
0.06771128624677658,
0.10213258117437363,
0.02761104330420494,
0.24987994134426117,
0.09747033566236496,
-0.008237474597990513,
-0.06363545358181,
0.0807308703660965,
0.0885079875588417,
0.021105170249938965,
0.1651238203048706,
-0.016108719632029533,
-0.0800880640745163,
0.045709189027547836,
0.059363968670368195,
0.005179421976208687,
0.054593078792095184,
-0.15867459774017334,
-0.002952614100649953,
0.012539317831397057,
-0.04372416436672211,
0.05461004748940468,
0.08970040082931519,
-0.02864394709467888,
0.0022534795571118593,
0.032379548996686935,
-0.034199342131614685,
-0.07014678418636322,
-0.07852404564619064,
-0.04844200983643532,
-0.08897384256124496,
0.03554564714431763,
-0.0733514055609703,
-0.10210759192705154,
0.12099029868841171,
-0.09120429307222366,
-0.05839518830180168,
0.15761619806289673,
0.005658267065882683,
-0.015379025600850582,
0.021613508462905884,
-0.018313884735107422,
-0.06998008489608765,
-0.09043469280004501,
-0.025265786796808243,
-0.05581969767808914,
-0.00034481287002563477,
-0.022754119709134102,
0.05407267063856125,
0.003097068751230836,
0.0012711652088910341,
0.01943155936896801,
-0.025430506095290184,
-0.07713954895734787,
0.038080595433712006,
0.012721071019768715,
0.005436737090349197,
0.05713331326842308,
0.00028081517666578293,
0.05059444159269333,
0.13043241202831268,
-0.01290036365389824,
-0.03613324463367462,
-0.057751890271902084,
0.083071269094944,
0.056389883160591125,
0.08640839904546738,
0.013671409338712692,
-0.12091672420501709,
0.032301951199769974,
0.07919038087129593,
0.22164171934127808,
-0.1431424915790558,
0.06439877301454544,
-0.0405237078666687,
0.02471749670803547,
-0.06479497253894806,
0.1593046933412552,
-0.017333529889583588,
0.07570979744195938,
-0.049160826951265335,
-0.07985491305589676,
-0.15105730295181274,
0.0176090020686388,
-0.06273580342531204,
0.05261151120066643,
0.011236591264605522,
-0.09485503286123276,
-0.07914233207702637,
0.1116572842001915,
-0.1430240422487259,
0.08569955825805664,
-0.035742953419685364,
-0.07468204945325851,
-0.0538196861743927,
0.007416931446641684,
0.09041373431682587,
0.024070262908935547,
0.02531118132174015,
-0.13092362880706787,
-0.026169881224632263,
0.027605939656496048,
0.033367056399583817,
-0.07398702204227448,
0.0391792468726635,
0.13899986445903778,
0.009495166130363941,
0.153911292552948,
0.054592568427324295,
0.07061776518821716,
-0.008473500609397888,
0.06071953475475311,
-0.046108637005090714,
0.18738283216953278,
0.06291120499372482,
0.05297958478331566,
-0.057445138692855835,
-0.10057613253593445,
0.029919458553195,
0.08281884342432022,
0.003813024377450347,
-0.020148636773228645,
0.059738293290138245,
0.20949958264827728,
0.05428796634078026,
-0.06807390600442886,
0.0007859329925850034,
-0.08345988392829895,
0.1066650003194809,
0.02822888270020485,
-0.06648648530244827,
0.027108725160360336,
-0.032934535294771194,
0.04416755214333534,
0.03486025333404541,
-0.01618511974811554,
-0.0956811010837555,
-0.027692638337612152,
-0.013055350631475449,
-0.07047244906425476,
0.011390740051865578,
-0.08850529789924622,
-0.0391521193087101,
-0.09493311494588852,
0.03537774831056595,
-0.010473043657839298,
0.006325382739305496,
0.10618453472852707,
-0.00744493305683136,
-0.02083033137023449,
0.21777939796447754,
-0.026796529069542885,
0.06863440573215485,
-0.08914057910442352,
-0.10602302849292755
] |
null | null | null |
## Music Source Separation in the Waveform Domain
This is the Demucs model, serialized from Facebook Research's pretrained models.
From Facebook research:
Demucs is based on U-Net convolutional architecture inspired by Wave-U-Net and SING, with GLUs, a BiLSTM between the encoder and decoder, specific initialization of weights and transposed convolutions in the decoder.
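To make the description above concrete, here is an illustrative PyTorch sketch of one encoder layer in that style (a strided convolution followed by a channel-wise GLU); it is a simplification for exposition, not the actual Demucs source:
```python
import torch.nn as nn

# One Demucs-style encoder layer: a strided Conv1d downsamples in time, then a
# 1x1 convolution doubles the channels so the GLU can gate them back down.
channels = 64
encoder_layer = nn.Sequential(
    nn.Conv1d(2, channels, kernel_size=8, stride=4),  # stereo waveform in
    nn.ReLU(),
    nn.Conv1d(channels, 2 * channels, kernel_size=1),
    nn.GLU(dim=1),  # output has `channels` channels again
)
```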
This is the `demucs_extra` version, meaning that it was trained on the MusDB dataset, along with 150 extra songs of data.
See [facebookresearch's repository](https://github.com/facebookresearch/demucs) for more information on Demucs.
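A minimal usage sketch with the `demucs` Python package follows; the loader lived at `demucs.pretrained.load_pretrained` in the v2-era releases that shipped `demucs_extra`, and the module paths below are assumptions that were renamed in later versions:
```python
# Sketch only: v2-era demucs API; v3+ moved these to get_model / demucs.apply.
import torch
from demucs.pretrained import load_pretrained
from demucs.utils import apply_model

model = load_pretrained("demucs_extra")
model.eval()

# `mix` should be a (channels, time) tensor at 44.1 kHz; a silent stereo
# placeholder is used here instead of a real song.
mix = torch.zeros(2, 44100 * 10)
with torch.no_grad():
    # Returns a (sources, channels, time) tensor; the source order for this
    # model family is drums, bass, other, vocals.
    sources = apply_model(model, mix)
```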
|
{"tags": "audacity"}
| null |
hugggof/demucs_extra
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
## Music Source Separation in the Waveform Domain
This is the Demucs model, serialized from Facebook Research's pretrained models.
From Facebook research:
Demucs is based on U-Net convolutional architecture inspired by Wave-U-Net and SING, with GLUs, a BiLSTM between the encoder and decoder, specific initialization of weights and transposed convolutions in the decoder.
This is the 'demucs_extra' version, meaning that it was trained on the MusDB dataset, along with 150 extra songs of data.
See facebookresearch's repository for more information on Demucs.
|
[
"## Music Source Separation in the Waveform Domain\n\nThis is the Demucs model, serialized from Facebook Research's pretrained models. \n\nFrom Facebook research:\n\n Demucs is based on U-Net convolutional architecture inspired by Wave-U-Net and SING, with GLUs, a BiLSTM between the encoder and decoder, specific initialization of weights and transposed convolutions in the decoder.\n\n\nThis is the 'demucs_extra' version, meaning that is was trained on the MusDB dataset, along with 150 extra songs of data. \n\nSee facebookresearch's repository for more information on Demucs."
] |
[
"TAGS\n#audacity #region-us \n",
"## Music Source Separation in the Waveform Domain\n\nThis is the Demucs model, serialized from Facebook Research's pretrained models. \n\nFrom Facebook research:\n\n Demucs is based on U-Net convolutional architecture inspired by Wave-U-Net and SING, with GLUs, a BiLSTM between the encoder and decoder, specific initialization of weights and transposed convolutions in the decoder.\n\n\nThis is the 'demucs_extra' version, meaning that is was trained on the MusDB dataset, along with 150 extra songs of data. \n\nSee facebookresearch's repository for more information on Demucs."
] |
[
10,
147
] |
[
"passage: TAGS\n#audacity #region-us \n## Music Source Separation in the Waveform Domain\n\nThis is the Demucs model, serialized from Facebook Research's pretrained models. \n\nFrom Facebook research:\n\n Demucs is based on U-Net convolutional architecture inspired by Wave-U-Net and SING, with GLUs, a BiLSTM between the encoder and decoder, specific initialization of weights and transposed convolutions in the decoder.\n\n\nThis is the 'demucs_extra' version, meaning that is was trained on the MusDB dataset, along with 150 extra songs of data. \n\nSee facebookresearch's repository for more information on Demucs."
] |
[
-0.0984254851937294,
0.11051095277070999,
-0.0036050330381840467,
0.006045943591743708,
0.07251926511526108,
-0.08850971609354019,
0.12860839068889618,
-0.1260506957769394,
-0.09716453403234482,
0.0719556212425232,
-0.059320662170648575,
-0.09073952585458755,
0.02789967693388462,
0.11035822331905365,
-0.08522844314575195,
-0.12327053397893906,
0.10861693322658539,
-0.06748563796281815,
-0.09746930748224258,
-0.020924536511301994,
0.046000611037015915,
-0.05249878764152527,
0.035554539412260056,
-0.09113292396068573,
-0.08299572765827179,
-0.01002824492752552,
-0.06052923575043678,
0.013304353691637516,
-0.003731825388967991,
0.08585608005523682,
0.08530444651842117,
0.038281407207250595,
0.00033467059256508946,
-0.03964163735508919,
0.062261663377285004,
0.06954625248908997,
-0.0338096097111702,
-0.016680138185620308,
0.052178625017404556,
0.0068933311849832535,
-0.0022053478751331568,
0.13928088545799255,
-0.02989039570093155,
-0.00453580217435956,
-0.08578896522521973,
-0.05896621197462082,
-0.1794430911540985,
-0.09491106867790222,
0.09249408543109894,
0.06579923629760742,
-0.06430996209383011,
-0.056158870458602905,
-0.11766819655895233,
0.0534934401512146,
-0.036730311810970306,
-0.13497096300125122,
-0.006370102521032095,
0.19609364867210388,
0.03113451413810253,
0.06727851182222366,
0.024140723049640656,
0.1004834994673729,
0.07858597487211227,
0.05589380860328674,
-0.05035271868109703,
-0.05640767514705658,
0.12144464254379272,
-0.08055379241704941,
-0.14330710470676422,
0.030325356870889664,
0.28894540667533875,
0.04391150549054146,
-0.04722191020846367,
0.0931529626250267,
-0.041835181415081024,
0.10746963322162628,
-0.03343968093395233,
-0.029907967895269394,
-0.02274254523217678,
0.09941544383764267,
-0.022843610495328903,
-0.013700587674975395,
0.0027054641395807266,
-0.029522674158215523,
-0.16087424755096436,
0.05813288688659668,
-0.005472056567668915,
0.04199574887752533,
-0.10288447141647339,
-0.028613382950425148,
-0.020785896107554436,
-0.08845612406730652,
0.03931684419512749,
-0.1066790521144867,
-0.038521066308021545,
0.026306703686714172,
-0.16186359524726868,
-0.2764608561992645,
0.1254165768623352,
0.13995866477489471,
-0.1545744687318802,
-0.01910500042140484,
-0.11567003279924393,
0.06556206196546555,
0.22442056238651276,
-0.010423751547932625,
-0.17200149595737457,
-0.09960819035768509,
0.0986093133687973,
-0.04790283739566803,
0.021417828276753426,
-0.08278940618038177,
-0.1581125557422638,
0.02696804888546467,
0.062148477882146835,
-0.00596094923093915,
0.020473815500736237,
-0.023518884554505348,
-0.07145234942436218,
-0.09664677083492279,
0.08280033618211746,
-0.06485799700021744,
-0.006337923463433981,
-0.011428061872720718,
0.06745970994234085,
0.0800149217247963,
-0.0174886304885149,
0.12971189618110657,
0.07830356806516647,
0.02090667560696602,
-0.040101729333400726,
-0.09846826642751694,
-0.014873722568154335,
-0.07788527011871338,
0.08891217410564423,
-0.13302332162857056,
0.05172630026936531,
-0.12352089583873749,
-0.0024025102611631155,
-0.06331660598516464,
0.06366491317749023,
-0.011966883204877377,
-0.016327839344739914,
-0.02570476569235325,
-0.008533501997590065,
-0.03004765696823597,
-0.04935407638549805,
-0.10523593425750732,
-0.0354885533452034,
-0.02933238260447979,
-0.10319141298532486,
0.03789672628045082,
-0.2942942678928375,
0.12787406146526337,
0.029706522822380066,
0.0996650755405426,
-0.209195077419281,
0.06873147189617157,
-0.13774847984313965,
-0.011133700609207153,
0.0026211002841591835,
0.03177595138549805,
-0.08186653256416321,
0.011623703874647617,
0.1056135818362236,
-0.00020005424448754638,
-0.2727729380130768,
-0.08421549201011658,
0.07217937707901001,
-0.1566835194826126,
0.05948839709162712,
0.14945638179779053,
-0.04204999655485153,
-0.1288738250732422,
0.04874207079410553,
0.10737820714712143,
0.10795653611421585,
-0.0413970872759819,
-0.046339455991983414,
0.009042586199939251,
0.030279748141765594,
-0.02271769568324089,
0.06207374855875969,
-0.025366611778736115,
-0.12130764871835709,
0.027701914310455322,
0.1579856425523758,
0.12716539204120636,
-0.02406640350818634,
-0.019072894006967545,
0.043925635516643524,
-0.11557097733020782,
-0.0032160719856619835,
0.022358955815434456,
0.00960633996874094,
0.09208310395479202,
0.0697568878531456,
0.03393423929810524,
0.05379744991660118,
0.006455971393734217,
0.008441505953669548,
-0.08741337060928345,
0.2439771145582199,
-0.13628792762756348,
0.0715826004743576,
-0.24770186841487885,
0.005742948967963457,
0.04613962396979332,
0.04241098836064339,
0.04227655753493309,
0.07052308320999146,
-0.045439667999744415,
0.007819079793989658,
-0.02134288102388382,
0.05346928909420967,
-0.05130886286497116,
0.016123544424772263,
-0.007042242214083672,
-0.13650521636009216,
0.059913020581007004,
-0.12015294283628464,
-0.022309310734272003,
0.04078306630253792,
-0.058575861155986786,
0.13438084721565247,
0.056869037449359894,
0.06039608642458916,
-0.02069809101521969,
0.0615801066160202,
0.13595722615718842,
-0.021434053778648376,
0.023558178916573524,
-0.05071302875876427,
0.029796143993735313,
0.026831571012735367,
-0.028687352314591408,
-0.011773766949772835,
0.14730097353458405,
0.11542661488056183,
0.11344332993030548,
0.0335749015212059,
-0.028218742460012436,
0.00031848542857915163,
-0.026733694598078728,
0.05698832497000694,
0.006329831201583147,
0.07482113689184189,
0.02104845643043518,
0.042964596301317215,
-0.06160668656229973,
0.12043051421642303,
0.08042293787002563,
0.053338151425123215,
-0.07970892637968063,
-0.008432148024439812,
0.10005926340818405,
-0.10209258645772934,
0.06709666550159454,
0.19676633179187775,
0.07225626707077026,
0.14743317663669586,
-0.146858811378479,
-0.012434447184205055,
-0.08895740658044815,
-0.09614753723144531,
-0.032020747661590576,
0.1434505134820938,
0.011378311552107334,
0.025325894355773926,
-0.04922667145729065,
0.08800777792930603,
0.08146785199642181,
0.0256052166223526,
-0.013954794965684414,
-0.014690213836729527,
0.05999378487467766,
-0.06825948506593704,
0.06181831657886505,
-0.03244772180914879,
-0.024597635492682457,
0.030486082658171654,
-0.12005998194217682,
-0.0014369155978783965,
-0.05877319350838661,
-0.010666929185390472,
0.07631231099367142,
-0.23421631753444672,
-0.165174663066864,
-0.03831997513771057,
-0.042315635830163956,
-0.179552361369133,
0.03318701684474945,
0.017213456332683563,
-0.10378463566303253,
-0.004299539607018232,
-0.025180062279105186,
0.05392482876777649,
-0.004749849904328585,
0.014719259925186634,
0.08294569700956345,
-0.009693427011370659,
-0.06091507151722908,
-0.05766920745372772,
0.022637585178017616,
-0.06589538604021072,
0.09369078278541565,
-0.03297708183526993,
-0.049511075019836426,
0.12937092781066895,
0.2265757918357849,
0.14319145679473877,
-0.0010242338757961988,
0.06500605493783951,
0.24675343930721283,
-0.04107438027858734,
0.022672081366181374,
0.09558217227458954,
-0.07844647020101547,
-0.009549189358949661,
0.12007208913564682,
-0.005483218468725681,
-0.07778231054544449,
0.034775909036397934,
0.05580909177660942,
-0.0854572132229805,
-0.17723813652992249,
-0.15613165497779846,
-0.06332800537347794,
-0.026980947703123093,
-0.15250742435455322,
-0.009803470224142075,
0.12748406827449799,
0.015102745965123177,
0.09506511688232422,
0.11553476750850677,
0.058699626475572586,
-0.10522141307592392,
0.02281198464334011,
-0.0825154259800911,
0.06381526589393616,
-0.04743935167789459,
0.02032189816236496,
0.12486475706100464,
0.009531349875032902,
0.2655128538608551,
0.12400183826684952,
-0.12751680612564087,
0.10317637771368027,
0.004235494881868362,
0.02601170726120472,
0.08456536382436752,
-0.045065149664878845,
0.060959525406360626,
-0.04740152880549431,
-0.03805002570152283,
0.03413933515548706,
0.026083996519446373,
0.09058920294046402,
-0.17709960043430328,
-0.08141490817070007,
0.09888279438018799,
-0.036640238016843796,
0.17037996649742126,
0.036091431975364685,
0.002797152614220977,
-0.06749226897954941,
0.03861244395375252,
0.13200508058071136,
-0.02139023132622242,
0.14845208823680878,
0.1739177405834198,
0.1046161875128746,
-0.014624773524701595,
0.026970794424414635,
0.10775377601385117,
-0.07116547971963882,
0.016289157792925835,
-0.008849507197737694,
0.08179923892021179,
0.034085750579833984,
-0.09432489424943924,
-0.13192589581012726,
0.06140252947807312,
0.03535807132720947,
0.013069795444607735,
0.06213928014039993,
0.04259536415338516,
0.036825742572546005,
0.23726558685302734,
0.004954407457262278,
0.05647313967347145,
-0.18298238515853882,
0.03994264453649521,
-0.1306331753730774,
0.05651390925049782,
0.09466419368982315,
-0.034990210086107254,
-0.037020184099674225,
0.025663059204816818,
0.013035310432314873,
0.049606334418058395,
0.12544426321983337,
-0.2666390538215637,
-0.09161331504583359,
0.019038354977965355,
0.08895724266767502,
0.005490568932145834,
-0.025957848876714706,
-0.034650832414627075,
-0.11832897365093231,
-0.02186417393386364,
-0.06509732455015182,
-0.0056512667797505856,
-0.05556543171405792,
-0.05232706293463707,
0.11042412370443344,
0.009904718957841396,
0.05140898749232292,
-0.05507173016667366,
0.0026972955092787743,
-0.054768018424510956,
-0.18704286217689514,
0.07622304558753967,
-0.06386454403400421,
-0.019205419346690178,
-0.03376197814941406,
0.005093995947390795,
0.14990803599357605,
-0.054035596549510956,
0.030909257009625435,
0.005731901619583368,
0.06720616668462753,
-0.08025289326906204,
0.06560802459716797,
0.13445481657981873,
0.040986575186252594,
0.12480498850345612,
-0.01995663344860077,
-0.17239297926425934,
0.08178786188364029,
0.004098046105355024,
0.06075083091855049,
0.04241108521819115,
-0.023704279214143753,
0.06836014240980148,
0.31932947039604187,
-0.033890970051288605,
-0.286073237657547,
0.030191628262400627,
-0.02901158109307289,
0.002335210796445608,
-0.05560445412993431,
-0.3495447039604187,
0.023434780538082123,
-0.011049911379814148,
-0.020955000072717667,
0.08325118571519852,
-0.1099502220749855,
-0.03982491046190262,
0.2123277336359024,
-0.08620263636112213,
0.4563305377960205,
-0.05764986574649811,
-0.08344469964504242,
-0.0798477828502655,
-0.10497302561998367,
0.11247462779283524,
-0.2590191066265106,
0.05089731886982918,
0.09166215360164642,
-0.07373958826065063,
-0.00762568786740303,
0.0034638328943401575,
0.03885031118988991,
0.12862499058246613,
0.10797363519668579,
-0.008880236186087132,
-0.04672218859195709,
0.09418384730815887,
-0.007900015451014042,
-0.032260581851005554,
0.09544503688812256,
0.06798213720321655,
0.053519126027822495,
-0.06857284903526306,
-0.030034346505999565,
-0.01402990985661745,
-0.003967273980379105,
-0.06131896749138832,
-0.08825316280126572,
0.1363583654165268,
0.00417021568864584,
0.02044738084077835,
0.12035008519887924,
0.043070536106824875,
-0.08180688321590424,
0.1253424882888794,
0.02452230080962181,
-0.04747417941689491,
0.12682296335697174,
0.00427387747913599,
-0.08135872334241867,
0.15098388493061066,
-0.043728057295084,
0.041795216500759125,
0.04692096635699272,
-0.03474003076553345,
0.044259678572416306,
0.07086393237113953,
-0.1550169587135315,
0.03162568807601929,
0.07055824995040894,
-0.09102218598127365,
-0.1908915787935257,
-0.03823896497488022,
-0.10222160071134567,
0.13976556062698364,
0.16492167115211487,
0.1624106466770172,
0.056336209177970886,
0.02964153327047825,
-0.012256993912160397,
0.06563686579465866,
-0.07104195654392242,
-0.019043806940317154,
0.03503834456205368,
-0.06398896127939224,
-0.1200665533542633,
0.14166274666786194,
0.07298050075769424,
-0.05349982529878616,
0.05294365435838699,
-0.0996924489736557,
-0.08015742897987366,
-0.09790229052305222,
-0.1217314675450325,
0.14451567828655243,
0.03513782098889351,
-0.0980423167347908,
0.02330661192536354,
-0.15034599602222443,
0.042145274579524994,
0.2251821905374527,
-0.014267854392528534,
0.11876256763935089,
-0.041557442396879196,
0.017164718359708786,
0.021014608442783356,
0.04444205015897751,
-0.08455436676740646,
0.003914275206625462,
-0.103535495698452,
-0.19526024162769318,
-0.05767256021499634,
0.05926940217614174,
-0.08561358600854874,
-0.05717528238892555,
-0.11202998459339142,
-0.0171333197504282,
-0.40642693638801575,
-0.09220243245363235,
-0.03316734358668327,
-0.03259893134236336,
-0.03742014989256859,
-0.01080884225666523,
-0.01079822238534689,
0.059302881360054016,
-0.002716044196859002,
0.023007091134786606,
-0.02089136838912964,
0.0151566406711936,
-0.07927072793245316,
-0.01873483508825302,
-0.061599601060152054,
0.013943037018179893,
0.11627840250730515,
0.048462290316820145,
0.010946320369839668,
0.02570340596139431,
-0.14311043918132782,
-0.07661770284175873,
0.013731914572417736,
0.054790567606687546,
0.02604222670197487,
-0.003779560560360551,
-0.03083694912493229,
-0.03328651562333107,
-0.02291017211973667,
-0.022694822400808334,
0.14713920652866364,
-0.01943664625287056,
-0.04146181046962738,
-0.06773171573877335,
-0.019417323172092438,
-0.015625031664967537,
-0.025943832471966743,
0.1135106310248375,
0.1872452199459076,
0.05227990448474884,
0.007213601376861334,
0.0442255400121212,
0.031776491552591324,
0.001160785206593573,
0.05739358812570572,
-0.06270278990268707,
0.02334754168987274,
-0.09137222170829773,
0.007464520633220673,
-0.010481334291398525,
0.22068440914154053,
0.0731603354215622,
-0.03027505800127983,
-0.06679899245500565,
-0.023336464539170265,
-0.06460975855588913,
0.07459346204996109,
0.016986655071377754,
0.08929593861103058,
-0.058818716555833817,
-0.11988690495491028,
0.008997536264359951,
0.03958269953727722,
0.08971432596445084,
0.021981023252010345,
0.0653097927570343,
0.13276788592338562,
-0.020252978429198265,
0.1698632538318634,
-0.05028338357806206,
0.025272777304053307,
0.0484582856297493,
-0.07149339467287064,
0.07091361284255981,
-0.024766523391008377,
-0.08030541241168976,
-0.10918400436639786,
-0.021104857325553894,
0.11935756355524063,
-0.02643369883298874,
-0.010887732729315758,
-0.07190991938114166,
-0.052854910492897034,
-0.04940885305404663,
-0.07677581161260605,
0.0023730911780148745,
-0.128541961312294,
-0.11715088039636612,
-0.08740036934614182,
-0.007373051717877388,
-0.02164641208946705,
0.055207159370183945,
-0.11804715543985367,
0.00035759806632995605,
0.130796417593956,
-0.034745603799819946,
-0.12292823195457458,
0.10494808852672577,
-0.05099385976791382,
0.09980740398168564,
0.035134945064783096,
-0.011486525647342205,
0.012310569174587727,
-0.06720423698425293,
0.13044145703315735,
0.0362943671643734,
-0.03387879580259323,
-0.025060322135686874,
0.015797343105077744,
-0.07932209223508835,
0.13205914199352264,
0.08355264365673065,
-0.0331016480922699,
0.06455827504396439,
-0.02202746272087097,
0.04714246839284897,
0.03155713155865669,
-0.1697293519973755,
-0.04791245236992836,
-0.08054816722869873,
0.17317238450050354,
-0.013558685779571533,
-0.043667539954185486,
-0.0912189930677414,
0.10773228853940964,
0.16276322305202484,
-0.12683606147766113,
-0.025872766971588135,
0.10589508712291718,
0.0068242233246564865,
-0.10611388087272644,
0.10692187398672104,
0.09822621196508408,
0.29894161224365234,
0.08696286380290985,
-0.07664337754249573,
-0.09684017300605774,
-0.0960930734872818,
0.02245800755918026,
-0.030624819919466972,
0.05352722108364105,
-0.03415026143193245,
0.007793406490236521,
0.12585453689098358,
-0.2553997039794922,
-0.04223945736885071,
-0.010647390969097614,
-0.05775701627135277,
-0.10206619650125504,
-0.08978589624166489,
-0.02309618890285492,
0.06025676801800728,
0.012106244452297688,
-0.10855475813150406,
0.018848566338419914,
0.13564006984233856,
-0.027759401127696037,
0.04150335118174553,
0.005497797857969999,
0.06278321892023087,
0.0961226373910904,
-0.08053169399499893,
-0.030960986390709877,
0.016862893477082253,
0.014751696027815342,
0.12211859226226807,
0.010645265690982342,
0.011129957623779774,
-0.01864597573876381,
0.01953298971056938,
-0.1206325963139534,
0.03090134635567665,
-0.05246696248650551,
0.0905231311917305,
0.059266913682222366,
0.15306776762008667,
0.03210970386862755,
0.009286152198910713,
-0.12441185116767883,
-0.04317324608564377,
0.07079322636127472,
-0.13444466888904572,
0.0675542876124382,
-0.03643158823251724,
0.014971575699746609,
-0.0026903017424046993,
-0.013660537078976631,
0.08748278021812439,
-0.030714938417077065,
0.026586422696709633,
-0.07996036857366562,
0.006993372458964586,
0.027406608685851097,
0.0615178644657135,
0.05597847327589989,
-0.13751129806041718,
0.0656125396490097,
-0.050306618213653564,
0.07311392575502396,
-0.017649415880441666,
0.009766277857124805,
0.08063667267560959,
0.011566836386919022,
-0.03926575183868408,
-0.03390324115753174,
-0.016789628192782402,
-0.04050784930586815,
-0.05390946939587593,
-0.08715096116065979
] |
null | null | null |
# Labeler With Timestamps
## Being used for the `Audio Labeler` effect in Audacity
This is an audio labeler model which is used in Audacity's labeler effect.
metadata:
```
{
"sample_rate": 48000,
"domain_tags": ["Music"],
"tags": ["Audio Labeler"],
"effect_type": "waveform-to-labels",
"multichannel": false,
"labels": ["Acoustic Guitar", "Auxiliary Percussion", "Brass", "Clean Electric Guitar", "Distorted Electric Guitar", "Double Bass", "Drum Set", "Electric Bass", "Flute", "piano", "Reeds", "Saxophone", "Strings", "Trumpet", "Voice"],
"short_description": "Use me to label some instruments!",
"long_description": "An audio labeler, which outputs label predictions and time ranges for the labels. This model can label various instruments listed in the labels section."
}
```
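As a rough, unofficial illustration of how a "waveform-to-labels" model like this might be exercised outside Audacity: such effects are typically shipped as TorchScript modules, so the sketch below loads one with torch.jit. The file name, input shape, and (labels, timestamps) output pair are assumptions inferred from the metadata, not documented behavior.

```python
# Hedged sketch only: "labeler.pt" is a hypothetical path, and the
# (labels, timestamps) return convention is inferred from the metadata above.
import torch

model = torch.jit.load("labeler.pt")
model.eval()

waveform = torch.zeros(1, 48000)          # mono, one second at 48 kHz
with torch.no_grad():
    labels, timestamps = model(waveform)  # assumed: label indices + time ranges

print(labels.shape, timestamps.shape)
```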
|
{"tags": ["audacity"], "inference": false}
| null |
hugggof/openl3-labeler-w-timestamps
|
[
"audacity",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[] |
TAGS
#audacity #region-us
|
# Labeler With Timestamps
## Being used for the 'Audio Labeler' effect in Audacity
This is an audio labeler model which is used in Audacity's labeler effect.
metadata:
|
[
"# Labeler With Timestamps",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
"TAGS\n#audacity #region-us \n",
"# Labeler With Timestamps",
"## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
10,
7,
40
] |
[
"passage: TAGS\n#audacity #region-us \n# Labeler With Timestamps## Being used for the 'Audio Labeler' effect in Audacity\n\nThis is a audio labeler model which is used in Audacity's labeler effect. \n\nmetadata:"
] |
[
-0.021484505385160446,
-0.0006438632844947278,
-0.005067161750048399,
0.018337098881602287,
0.05285165458917618,
0.051790207624435425,
0.22809752821922302,
-0.02128700539469719,
0.07179230451583862,
0.04418132081627846,
0.00492812180891633,
-0.04134761542081833,
0.0004073689051438123,
0.014410305768251419,
-0.05807766690850258,
-0.11423502117395401,
0.05318465456366539,
0.0027831371407955885,
0.14243218302726746,
0.10906808078289032,
0.037421125918626785,
-0.03748791292309761,
0.015337055549025536,
0.02590627409517765,
-0.07439304888248444,
-0.01827181689441204,
0.07663801312446594,
-0.1162421703338623,
0.10875940322875977,
-0.024547215551137924,
0.1139574870467186,
0.011637337505817413,
0.0004541972593870014,
-0.14214883744716644,
0.013121322728693485,
0.015870077535510063,
0.024494854733347893,
-0.02153090201318264,
-0.023856857791543007,
0.009746860712766647,
-0.083816297352314,
0.23894289135932922,
0.0664057582616806,
-0.00969631690531969,
-0.20188815891742706,
-0.22404679656028748,
-0.09084179252386093,
-0.10739291459321976,
0.004756412468850613,
0.0490371398627758,
-0.0543861947953701,
0.09276431053876877,
-0.043161772191524506,
0.031687818467617035,
0.08228517323732376,
-0.2126762717962265,
0.01638895832002163,
0.07198253273963928,
0.1466739922761917,
0.18836431205272675,
-0.05098732188344002,
0.08834094554185867,
0.08505135029554367,
-0.014379620552062988,
-0.016056768596172333,
-0.08335728943347931,
0.13410186767578125,
-0.011556998826563358,
-0.09106345474720001,
-0.05527634918689728,
0.5256308913230896,
0.008073483593761921,
-0.07996555417776108,
0.01653292588889599,
-0.03708580136299133,
0.009111994877457619,
-0.057205505669116974,
-0.07035591453313828,
0.014127888716757298,
0.0312060434371233,
0.09739303588867188,
0.12827852368354797,
-0.06300453841686249,
-0.004282607231289148,
-0.0353289432823658,
0.3585406541824341,
-0.015461822971701622,
0.06933444738388062,
-0.14780087769031525,
-0.11841633915901184,
-0.00484044011682272,
-0.06330302357673645,
0.06329280138015747,
-0.01707923226058483,
-0.015314407646656036,
-0.07428644597530365,
-0.030275540426373482,
-0.19959770143032074,
0.0239147637039423,
0.1922265887260437,
-0.10960890352725983,
0.01757093146443367,
-0.11754702031612396,
0.10381396859884262,
0.1550574153661728,
0.1755087971687317,
0.01399944443255663,
-0.09360602498054504,
-0.03711871802806854,
-0.05178084596991539,
0.018119335174560547,
-0.07467194646596909,
-0.1203484907746315,
0.003606916405260563,
0.08772862702608109,
-0.005448746029287577,
-0.12272675335407257,
-0.10438061505556107,
-0.14663578569889069,
-0.0401589497923851,
0.0037264458369463682,
-0.03471369668841362,
0.015775229781866074,
-0.07291935384273529,
0.06292800605297089,
0.05641881376504898,
-0.028370194137096405,
0.061318203806877136,
0.09652682393789291,
0.2034740447998047,
-0.06862051784992218,
-0.04358157142996788,
0.013286505825817585,
-0.0944499522447586,
-0.007912985980510712,
0.05450157821178436,
0.1130652129650116,
-0.06443953514099121,
0.012059970758855343,
-0.08191393315792084,
0.03745824471116066,
0.07213488221168518,
-0.05025411397218704,
0.00242071645334363,
0.060772620141506195,
0.09258454293012619,
-0.024805467575788498,
-0.17067119479179382,
-0.1252039074897766,
0.03164743632078171,
-0.08342541754245758,
0.15662729740142822,
-0.11623119562864304,
0.09062706679105759,
-0.0650658905506134,
0.04718049243092537,
-0.1104879230260849,
-0.055287592113018036,
-0.014869136735796928,
0.004122885875403881,
-0.002884294604882598,
0.017479922622442245,
-0.08286096900701523,
0.08338399976491928,
-0.04309766739606857,
0.05989504233002663,
-0.19367079436779022,
-0.1260310709476471,
0.17534612119197845,
-0.14640302956104279,
-0.045514341443777084,
0.12003584951162338,
0.002150110434740782,
0.08241105824708939,
0.09046270698308945,
0.09736472368240356,
0.058002058416604996,
-0.19176232814788818,
0.09319666028022766,
-0.01988665759563446,
-0.043303538113832474,
0.0070592244155704975,
0.08272061496973038,
-0.02439957857131958,
-0.04209648445248604,
-0.025481626391410828,
0.2435605823993683,
0.036264386028051376,
-0.06420319527387619,
-0.042213212698698044,
0.04024505615234375,
0.015946930274367332,
0.07790469378232956,
-0.01035181526094675,
-0.04128796607255936,
-0.015576517209410667,
-0.010334626771509647,
0.0368223637342453,
-0.046076949685811996,
0.13618651032447815,
0.053620100021362305,
-0.04193887859582901,
0.051555268466472626,
0.01616564206779003,
0.011450044810771942,
-0.1377691924571991,
0.11049740016460419,
0.02333139069378376,
0.09019296616315842,
0.057246554642915726,
-0.0050320872105658054,
-0.004398754332214594,
-0.11768411099910736,
-0.008046472445130348,
0.03624694421887398,
-0.03705732524394989,
0.0075672161765396595,
0.005749550648033619,
-0.04319573938846588,
0.19982707500457764,
-0.05833710357546806,
-0.016767514869570732,
0.14189119637012482,
-0.06242610514163971,
0.094996377825737,
-0.1271081119775772,
0.09056445956230164,
-0.027975162491202354,
0.008967814967036247,
0.09832466393709183,
-0.031396448612213135,
0.08509203791618347,
-0.004561265464872122,
-0.0584564208984375,
-0.05204380303621292,
0.14304734766483307,
-0.14863286912441254,
0.13474692404270172,
0.09811428189277649,
0.02335822395980358,
-0.11332233995199203,
0.06877093762159348,
0.0314069464802742,
-0.04998651146888733,
-0.04519324004650116,
-0.023626601323485374,
0.1721908301115036,
0.015140045434236526,
0.04786427691578865,
-0.0074551720172166824,
0.09076029807329178,
0.04407624900341034,
-0.012242430821061134,
-0.05969199910759926,
-0.047708459198474884,
-0.12668460607528687,
0.10573811829090118,
0.0010336339473724365,
0.350055068731308,
0.05134793370962143,
0.17534898221492767,
-0.04449750855565071,
-0.05920090898871422,
0.012056415900588036,
-0.12444210052490234,
-0.004740368574857712,
0.10270487517118454,
-0.11464513093233109,
0.024277957156300545,
0.03619319573044777,
0.08212264627218246,
-0.06898751109838486,
-0.09334809333086014,
0.009126500226557255,
-0.019483700394630432,
0.05287101864814758,
-0.05913432314991951,
0.022474166005849838,
-0.01992453634738922,
0.03406579792499542,
0.08350486308336258,
-0.14223431050777435,
0.05789046734571457,
-0.014491894282400608,
-0.012532847933471203,
0.10257723927497864,
-0.14472141861915588,
-0.2891426980495453,
-0.19624750316143036,
-0.08518832176923752,
0.034790050238370895,
0.028780968859791756,
0.07209110260009766,
0.036252766847610474,
-0.011335141025483608,
0.07293584197759628,
0.16482123732566833,
-0.005761728622019291,
-0.028893698006868362,
0.0007436582236550748,
-0.013605151325464249,
-0.058115795254707336,
-0.0649532824754715,
0.026550473645329475,
-0.0562545508146286,
0.07952843606472015,
0.09646628051996231,
-0.05103760212659836,
0.1036817654967308,
0.2728244960308075,
0.05923086032271385,
-0.025361625477671623,
0.03125392645597458,
0.17647133767604828,
-0.19338639080524445,
0.006980955135077238,
0.12757015228271484,
-0.12372341752052307,
0.01010411698371172,
0.2134244292974472,
0.03452664986252785,
-0.08430557698011398,
0.01494095753878355,
0.025962108746170998,
-0.15896068513393402,
-0.08445219695568085,
-0.1227358803153038,
-0.10085370391607285,
0.05185725539922714,
-0.03441533073782921,
0.018296627327799797,
-0.033048637211322784,
-0.014218373224139214,
0.08099888265132904,
-0.0143593680113554,
0.024497278034687042,
-0.05483734980225563,
0.15062864124774933,
-0.09468375146389008,
0.0772961750626564,
0.020381903275847435,
-0.09199268370866776,
0.10506660491228104,
0.20482592284679413,
0.10585767030715942,
0.21446315944194794,
-0.0016521638026461005,
0.11428239941596985,
-0.15044303238391876,
0.14183612167835236,
0.04478134587407112,
0.05431683734059334,
-0.026906060054898262,
-0.0862727239727974,
-0.047132477164268494,
0.012507380917668343,
0.06577476114034653,
-0.026141580194234848,
-0.10212116688489914,
0.034652505069971085,
0.008729012683033943,
-0.017680905759334564,
0.0036890241317451,
0.15594594180583954,
-0.08333175629377365,
0.030340585857629776,
0.04300851747393608,
0.04005967080593109,
-0.10001058131456375,
0.21149621903896332,
-0.02823803387582302,
-0.03222974017262459,
-0.003745042020455003,
0.04110971838235855,
0.08343156427145004,
-0.14952799677848816,
-0.0018625571392476559,
-0.025877583771944046,
-0.09708667546510696,
-0.06843140721321106,
-0.0680423304438591,
-0.08458024263381958,
0.1322202980518341,
-0.009345171973109245,
-0.053615421056747437,
0.06041434779763222,
-0.01025574654340744,
0.0110896285623312,
0.17080256342887878,
0.09851231426000595,
0.034868720918893814,
-0.10945840179920197,
0.014785755425691605,
-0.014660876244306564,
-0.044587261974811554,
0.09216324239969254,
0.11667495965957642,
-0.09419670701026917,
-0.005596071947365999,
0.06717099249362946,
0.04681786522269249,
0.0641000047326088,
-0.09002681076526642,
-0.016234586015343666,
0.04291970655322075,
0.2629513144493103,
-0.007236740086227655,
-0.004813991021364927,
-0.05849563330411911,
-0.18254528939723969,
-0.16057088971138,
0.2357768714427948,
0.00551609368994832,
-0.049157872796058655,
-0.24055224657058716,
0.12458933144807816,
0.03016638569533825,
0.06019039452075958,
-0.05400712788105011,
0.05368025228381157,
-0.052789971232414246,
0.012589472346007824,
0.11660688370466232,
0.0023232242092490196,
0.0999569445848465,
-0.09381167590618134,
0.13038797676563263,
0.01847541518509388,
0.012302043847739697,
-0.020896652713418007,
0.02948508970439434,
0.004276649560779333,
-0.06764915585517883,
0.08325237035751343,
-0.26703161001205444,
-0.18711677193641663,
0.14760367572307587,
-0.017820335924625397,
-0.19044965505599976,
0.03098556026816368,
-0.05834845453500748,
0.05085662007331848,
0.1423172652721405,
0.009495090693235397,
0.20603500306606293,
0.1399824172258377,
-0.025159304961562157,
-0.33388087153434753,
-0.05143028870224953,
-0.045972324907779694,
0.015732472762465477,
0.04291923716664314,
-0.16003818809986115,
0.12336684763431549,
0.016799015924334526,
-0.05440633371472359,
0.23861414194107056,
-0.23513120412826538,
-0.12660576403141022,
0.2122175395488739,
-0.06457522511482239,
0.2644694745540619,
-0.06212148070335388,
-0.04672051966190338,
-0.03514842316508293,
-0.1799527257680893,
0.012854089960455894,
-0.003894312772899866,
0.06716112047433853,
0.00604367908090353,
0.10770870000123978,
0.013084123842418194,
0.05448165535926819,
0.03274652361869812,
0.15878601372241974,
0.03369911387562752,
0.018853850662708282,
-0.13668116927146912,
0.18280509114265442,
0.05741563439369202,
-0.10356690734624863,
0.14527180790901184,
-0.008830551989376545,
-0.035573579370975494,
-0.030047202482819557,
-0.010649016126990318,
-0.008924041874706745,
-0.009016573429107666,
-0.06948252022266388,
-0.0672823116183281,
0.015485942363739014,
-0.08117838948965073,
-0.048946596682071686,
0.20216596126556396,
-0.05159614235162735,
0.011027611792087555,
0.12403208017349243,
-0.011435006745159626,
-0.26176539063453674,
0.028023652732372284,
-0.011821278370916843,
-0.058301571756601334,
0.14020512998104095,
-0.09313308447599411,
0.11063060909509659,
0.04986250400543213,
0.03896137326955795,
0.07439582049846649,
0.07209175825119019,
-0.009818561375141144,
0.08371114730834961,
0.09869398921728134,
-0.09444840252399445,
-0.027821596711874008,
0.0011558390688151121,
-0.13931778073310852,
0.03972447291016579,
-0.08768053352832794,
0.10970877856016159,
0.06300700455904007,
0.014435716904699802,
0.02541528269648552,
-0.026219038292765617,
-0.051854196935892105,
0.03441007807850838,
-0.06393726170063019,
0.033102672547101974,
-0.05841590091586113,
0.11295750737190247,
0.0365445539355278,
-0.06363508850336075,
-0.06558698415756226,
-0.0705740749835968,
-0.037514086812734604,
0.019596023485064507,
-0.14307817816734314,
0.08929405361413956,
-0.23767414689064026,
-0.0584879145026207,
0.04094400256872177,
-0.19466911256313324,
0.0029014800675213337,
0.024716651067137718,
0.03797433525323868,
0.11869176477193832,
-0.018185090273618698,
-0.059497933834791183,
0.010908928699791431,
0.06400460004806519,
-0.1176125556230545,
-0.003620689967647195,
-0.1001986414194107,
-0.14130334556102753,
-0.05868416652083397,
0.029526878148317337,
-0.10568644851446152,
-0.11890694499015808,
-0.14850857853889465,
0.1059892326593399,
-0.08497946709394455,
0.03201470524072647,
0.08367466181516647,
-0.00280367280356586,
0.02105284482240677,
0.021649803966283798,
-0.05931747704744339,
-0.025826692581176758,
-0.06417136639356613,
0.025843366980552673,
0.008523713797330856,
0.07865966856479645,
0.0352700799703598,
-0.04719173535704613,
0.09318393468856812,
-0.002589520998299122,
0.07731382548809052,
0.010508579201996326,
-0.059428587555885315,
0.010786592960357666,
-0.2688494026660919,
-0.0012296898057684302,
0.08465216308832169,
-0.01340681966394186,
-0.015460592694580555,
-0.035417817533016205,
-0.022898469120264053,
-0.009045785292983055,
-0.0924508348107338,
0.022860487923026085,
-0.16531339287757874,
-0.04812479764223099,
-0.03044496849179268,
-0.1157197654247284,
-0.1271408200263977,
-0.004615178797394037,
-0.04251336678862572,
0.10755633562803268,
0.062401071190834045,
0.039347585290670395,
-0.012389088049530983,
-0.0043200175277888775,
0.06835009902715683,
0.018591677770018578,
0.032895367592573166,
-0.12574823200702667,
-0.11910491436719894,
-0.04357936233282089,
-0.046493496745824814,
-0.00045691151171922684,
0.29997023940086365,
0.05617842078208923,
0.05909380689263344,
0.040856510400772095,
0.11521424353122711,
-0.18266475200653076,
-0.0018162823980674148,
0.17294585704803467,
0.07575566321611404,
-0.05981313809752464,
-0.05247589573264122,
0.03960612416267395,
0.016241108998656273,
0.006442406680434942,
0.02243826724588871,
0.03410492464900017,
0.06481055915355682,
0.020537616685032845,
-0.019840165972709656,
0.038949351757764816,
-0.08548671007156372,
0.013296794146299362,
-0.0007909836713224649,
0.08623629063367844,
0.07151569426059723,
0.007140942383557558,
0.0016894114669412374,
0.0022441933397203684,
0.13880868256092072,
-0.09242458641529083,
0.0023987730965018272,
-0.09070748090744019,
0.22647464275360107,
-0.04325670748949051,
-0.12169764190912247,
0.038826365023851395,
-0.10475980490446091,
0.006399885285645723,
0.021193863824009895,
-0.041773345321416855,
0.001963880844414234,
0.13854971528053284,
-0.319352388381958,
-0.01908038556575775,
0.10468185693025589,
-0.05477023124694824,
-0.026111433282494545,
0.01109049841761589,
-0.06028540059924126,
0.008812285028398037,
-0.03588986396789551,
-0.011119775474071503,
-0.06035342067480087,
-0.006628931500017643,
-0.03466998413205147,
-0.211971715092659,
-0.041656192392110825,
0.009380905888974667,
0.016507409512996674,
0.04839109256863594,
-0.14962239563465118,
0.036059118807315826,
-0.015684083104133606,
0.016732703894376755,
0.12131481617689133,
0.040341634303331375,
-0.04087461903691292,
-0.0879555270075798,
-0.04029614105820656,
-0.016562217846512794,
0.1649085283279419,
-0.06949940323829651,
-0.021617960184812546,
-0.06810110062360764,
0.05687868595123291,
0.05716446042060852,
-0.09010352939367294,
-0.04603949189186096,
0.05267687141895294,
0.048909034579992294,
-0.002316117286682129,
0.11230386048555374,
0.005894436035305262,
0.19003604352474213,
0.048014890402555466,
-0.06532235443592072,
-0.12203668802976608,
-0.016009889543056488,
-0.019967423751950264,
-0.026607157662510872,
0.12088795751333237,
-0.015542862936854362,
-0.0735861286520958,
0.164265975356102,
-0.23197025060653687,
-0.03211001306772232,
0.05905170366168022,
-0.1742759644985199,
-0.061509229242801666,
-0.09256364405155182,
0.02207442931830883,
0.0020379936322569847,
0.06619436293840408,
-0.050982385873794556,
-0.12229273468255997,
-0.1578919142484665,
-0.010168825276196003,
-0.01863432116806507,
0.01847742684185505,
-0.026376839727163315,
-0.017067529261112213,
0.11589868366718292,
-0.01960325613617897,
0.03148339316248894,
-0.0004135226190555841,
0.08781499415636063,
0.07510780543088913,
0.06403015553951263,
-0.05204116180539131,
-0.05012733116745949,
-0.1063343957066536,
0.07812801003456116,
-0.04503290727734566,
0.046771563589572906,
0.03632231801748276,
0.06484950333833694,
0.013368056155741215,
-0.10554923862218857,
-0.06601636856794357,
0.017392858862876892,
0.017682379111647606,
-0.05966072529554367,
0.11206098645925522,
-0.04563582316040993,
0.06506180763244629,
-0.051277030259370804,
-0.007694774307310581,
0.012419302016496658,
0.033992357552051544,
0.011566379107534885,
-0.02209188975393772,
0.02886912412941456,
-0.03064168244600296,
0.0551360547542572,
-0.12101724743843079,
-0.2759518325328827,
0.017512356862425804,
-0.047146447002887726,
0.10899578779935837,
-0.02772471494972706,
0.019445661455392838,
0.07968158274888992,
0.02958609163761139,
0.01568187028169632,
-0.2527823746204376,
0.08299526572227478,
0.07532165944576263,
-0.03713420778512955,
-0.07192482799291611
] |
null | null |
transformers
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('https://images.genius.com/9fd98af9a817af8cd78636f71895b6ad.500x500x1.jpg')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">100 gecs</div>
<a href="https://genius.com/artists/100-gecs">
<div style="text-align: center; font-size: 14px;">@100-gecs</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 100 gecs.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/100-gecs).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/100-gecs")
```
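As a small follow-up that is not part of the original card, you can peek at what was loaded; the `train` split and `text` column names are assumptions, so inspect `dataset` first if they differ:

```python
# Hedged sketch: split and column names are assumptions, not documented here.
print(dataset)                      # lists the available splits and columns
print(dataset["train"][0]["text"])  # first lyric snippet, if the schema matches
```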
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3c9j4tvq/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 100 gecs's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1v0ffa4e) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1v0ffa4e/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/100-gecs')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/100-gecs")
model = AutoModelWithLMHead.from_pretrained("huggingartists/100-gecs")
```
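For completeness (this snippet is not from the original card), generation with the objects loaded above might look like the following; the decoding parameters are illustrative assumptions rather than tuned values:

```python
# Hedged sketch: sampling settings below are illustrative, not recommendations.
inputs = tokenizer("I am", return_tensors="pt")
outputs = model.generate(
    **inputs,
    max_length=50,            # assumed cap on total tokens
    do_sample=True,           # sample instead of greedy decoding
    top_p=0.95,               # nucleus sampling threshold (assumed)
    num_return_sequences=3,   # a few candidate continuations
    pad_token_id=tokenizer.eos_token_id,  # GPT-2 has no pad token by default
)
for seq in outputs:
    print(tokenizer.decode(seq, skip_special_tokens=True))
```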
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/100-gecs"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/100-gecs
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/100-gecs",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/100-gecs #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">100 gecs</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@100-gecs</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 100 gecs.
Dataset is available here.
And can be used with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 100 gecs's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">21 Savage</div>
<a href="https://genius.com/artists/21-savage">
<div style="text-align: center; font-size: 14px;">@21-savage</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 21 Savage.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/21-savage).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/21-savage")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3lbkznnf/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 21 Savage's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1fw9b6m4) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1fw9b6m4/artifacts) is logged and versioned.
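As an aside that is not in the original card, the versioned artifacts can plausibly be listed through the W&B public API; the run path mirrors the links above but is otherwise an assumption:

```python
# Hedged sketch: assumes the public W&B API and the run id from the links above.
import wandb

api = wandb.Api()
run = api.run("huggingartists/huggingartists/1fw9b6m4")
for artifact in run.logged_artifacts():
    print(artifact.name, artifact.type)
```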
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/21-savage')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/21-savage")
model = AutoModelWithLMHead.from_pretrained("huggingartists/21-savage")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/21-savage"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/21-savage
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/21-savage",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/21-savage #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">21 Savage</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@21-savage</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 21 Savage.
Dataset is available here.
And can be used with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 21 Savage's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">25/17</div>
<a href="https://genius.com/artists/25-17">
<div style="text-align: center; font-size: 14px;">@25-17</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 25/17.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/25-17).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/25-17")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1iuytbjp/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 25/17's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/knv4l4gw) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/knv4l4gw/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/25-17')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/25-17")
model = AutoModelWithLMHead.from_pretrained("huggingartists/25-17")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/25-17"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/25-17
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/25-17",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/25-17 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">25/17</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@25-17</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 25/17.
Dataset is available here.
And can be used with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 25/17's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">50 Cent</div>
<a href="https://genius.com/artists/50-cent">
<div style="text-align: center; font-size: 14px;">@50-cent</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 50 Cent.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/50-cent).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/50-cent")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1291qx5n/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 50 Cent's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1igwpphq) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1igwpphq/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/50-cent')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/50-cent")
model = AutoModelWithLMHead.from_pretrained("huggingartists/50-cent")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/50-cent"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/50-cent
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/50-cent",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/50-cent #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">50 Cent</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@50-cent</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 50 Cent.
Dataset is available here.
And can be used with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 50 Cent's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">5’Nizza</div>
<a href="https://genius.com/artists/5nizza">
<div style="text-align: center; font-size: 14px;">@5nizza</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 5’Nizza.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/5nizza).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/5nizza")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1zcp1grf/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 5’Nizza's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2zg6pzw7) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2zg6pzw7/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/5nizza')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/5nizza")
model = AutoModelWithLMHead.from_pretrained("huggingartists/5nizza")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/5nizza"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/5nizza
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/5nizza",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/5nizza #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">5’Nizza</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@5nizza</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 5’Nizza.
Dataset is available here.
And can be used with:
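```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/5nizza")
```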
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 5’Nizza's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
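```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/5nizza')
generator("I am", num_return_sequences=5)
```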
Or with the Transformers library:
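```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/5nizza")
model = AutoModelWithLMHead.from_pretrained("huggingartists/5nizza")
```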
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">5opka</div>
<a href="https://genius.com/artists/5opka">
<div style="text-align: center; font-size: 14px;">@5opka</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 5opka.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/5opka).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/5opka")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1o2s4fw8/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 5opka's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3vitposx) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3vitposx/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/5opka')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/5opka")
model = AutoModelWithLMHead.from_pretrained("huggingartists/5opka")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/5opka"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/5opka
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/5opka",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/5opka #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">5opka</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@5opka</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 5opka.
Dataset is available here.
And can be used with:
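```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/5opka")
```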
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 5opka's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
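```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/5opka')
generator("I am", num_return_sequences=5)
```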
Or with the Transformers library:
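```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/5opka")
model = AutoModelWithLMHead.from_pretrained("huggingartists/5opka")
```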
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">6ix9ine</div>
<a href="https://genius.com/artists/6ix9ine">
<div style="text-align: center; font-size: 14px;">@6ix9ine</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from 6ix9ine.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/6ix9ine).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/6ix9ine")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/eqmcaj0r/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on 6ix9ine's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/s5dpg3h2) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/s5dpg3h2/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/6ix9ine')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/6ix9ine")
model = AutoModelWithLMHead.from_pretrained("huggingartists/6ix9ine")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/6ix9ine"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/6ix9ine
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/6ix9ine",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/6ix9ine #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">6ix9ine</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@6ix9ine</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from 6ix9ine.
Dataset is available here.
And can be used with:
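```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/6ix9ine")
```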
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on 6ix9ine's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
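```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/6ix9ine')
generator("I am", num_return_sequences=5)
```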
Or with the Transformers library:
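```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/6ix9ine")
model = AutoModelWithLMHead.from_pretrained("huggingartists/6ix9ine")
```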
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Aaron Watson</div>
<a href="https://genius.com/artists/aaron-watson">
<div style="text-align: center; font-size: 14px;">@aaron-watson</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Aaron Watson.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/aaron-watson).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/aaron-watson")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/14ha1tnc/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Aaron Watson's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/34e4zb2v) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/34e4zb2v/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/aaron-watson')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/aaron-watson")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aaron-watson")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/aaron-watson"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/aaron-watson
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/aaron-watson",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/aaron-watson #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Aaron Watson</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@aaron-watson</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Aaron Watson.
Dataset is available here.
And can be used with:
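```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/aaron-watson")
```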
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Aaron Watson's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
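```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/aaron-watson')
generator("I am", num_return_sequences=5)
```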
Or with the Transformers library:
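```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/aaron-watson")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aaron-watson")
```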
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">ABBA</div>
<a href="https://genius.com/artists/abba">
<div style="text-align: center; font-size: 14px;">@abba</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from ABBA.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/abba).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/abba")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3pc6wfre/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on ABBA's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3b7wqd1w) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3b7wqd1w/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/abba')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/abba")
model = AutoModelWithLMHead.from_pretrained("huggingartists/abba")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/abba"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/abba
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/abba",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/abba #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">ABBA</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@abba</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from ABBA.
Dataset is available here.
And can be used with:
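```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/abba")
```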
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on ABBA's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
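```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/abba')
generator("I am", num_return_sequences=5)
```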
Or with the Transformers library:
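```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/abba")
model = AutoModelWithLMHead.from_pretrained("huggingartists/abba")
```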
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Adele</div>
<a href="https://genius.com/artists/adele">
<div style="text-align: center; font-size: 14px;">@adele</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Adele.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/adele).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/adele")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1yyqw6ss/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Adele's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3qruwjpr) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3qruwjpr/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/adele')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/adele")
model = AutoModelWithLMHead.from_pretrained("huggingartists/adele")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/adele"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/adele
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/adele",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/adele #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Adele</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@adele</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Adele.
Dataset is available here.
And can be used with:
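```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/adele")
```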
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Adele's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
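```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/adele')
generator("I am", num_return_sequences=5)
```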
Or with the Transformers library:
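```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/adele")
model = AutoModelWithLMHead.from_pretrained("huggingartists/adele")
```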
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Агата Кристи (Agata Christie)</div>
<a href="https://genius.com/artists/agata-christie">
<div style="text-align: center; font-size: 14px;">@agata-christie</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Агата Кристи (Agata Christie).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/agata-christie).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/agata-christie")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1dtf6ia5/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Агата Кристи (Agata Christie)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/q27fvz1h) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/q27fvz1h/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/agata-christie')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/agata-christie")
model = AutoModelWithLMHead.from_pretrained("huggingartists/agata-christie")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/agata-christie"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/agata-christie
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/agata-christie",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/agata-christie #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Агата Кристи (Agata Christie)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@agata-christie</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Агата Кристи (Agata Christie).
Dataset is available here.
And can be used with:
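```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/agata-christie")
```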
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Агата Кристи (Agata Christie)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
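```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/agata-christie')
generator("I am", num_return_sequences=5)
```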
Or with the Transformers library:
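```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/agata-christie")
model = AutoModelWithLMHead.from_pretrained("huggingartists/agata-christie")
```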
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
"## Training procedure\n\nThe model is based on a pre-trained GPT-2 which is fine-tuned on Агата Кристи (Agata Christie)'s lyrics.\n\nHyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.\n\nAt the end of training, the final model is logged and versioned.",
"## How to use\n\nYou can use this model directly with a pipeline for text generation:\n\n\n\nOr with Transformers library:",
"## Limitations and bias\n\nThe model suffers from the same limitations and bias as GPT-2.\n\nIn addition, the data present in the user's tweets further affects the text generated by the model.",
"## About\n\n*Built by Aleksey Korshuk*\n\n\n\nFor more details, visit the project repository.\n\n.\n\nDataset is available here.\nAnd can be used with:\n\n\n\nExplore the data, which is tracked with W&B artifacts at every step of the pipeline.",
"## Training procedure\n\nThe model is based on a pre-trained GPT-2 which is fine-tuned on Агата Кристи (Agata Christie)'s lyrics.\n\nHyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.\n\nAt the end of training, the final model is logged and versioned.",
"## How to use\n\nYou can use this model directly with a pipeline for text generation:\n\n\n\nOr with Transformers library:",
"## Limitations and bias\n\nThe model suffers from the same limitations and bias as GPT-2.\n\nIn addition, the data present in the user's tweets further affects the text generated by the model.",
"## About\n\n*Built by Aleksey Korshuk*\n\n\n\nFor more details, visit the project repository.\n\n.\n\nDataset is available here.\nAnd can be used with:\n\n\n\nExplore the data, which is tracked with W&B artifacts at every step of the pipeline.## Training procedure\n\nThe model is based on a pre-trained GPT-2 which is fine-tuned on Агата Кристи (Agata Christie)'s lyrics.\n\nHyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.\n\nAt the end of training, the final model is logged and versioned.## How to use\n\nYou can use this model directly with a pipeline for text generation:\n\n\n\nOr with Transformers library:## Limitations and bias\n\nThe model suffers from the same limitations and bias as GPT-2.\n\nIn addition, the data present in the user's tweets further affects the text generated by the model.## About\n\n*Built by Aleksey Korshuk*\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">aikko</div>
<a href="https://genius.com/artists/aikko">
<div style="text-align: center; font-size: 14px;">@aikko</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from aikko.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/aikko).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/aikko")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1cfdpsrg/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on aikko's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/oesyn53g) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/oesyn53g/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/aikko')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/aikko")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aikko")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/aikko"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/aikko
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/aikko",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/aikko #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">aikko</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@aikko</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from aikko.
Dataset is available here.
And can be used with:
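```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/aikko")
```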
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on aikko's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
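```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/aikko')
generator("I am", num_return_sequences=5)
```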
Or with the Transformers library:
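```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/aikko")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aikko")
```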
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Aimer</div>
<a href="https://genius.com/artists/aimer">
<div style="text-align: center; font-size: 14px;">@aimer</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Aimer.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/aimer).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/aimer")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1rtjxc8q/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Aimer's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2rguugmg) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2rguugmg/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/aimer')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/aimer")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aimer")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/aimer"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/aimer
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/aimer",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/aimer #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Aimer</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@aimer</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Aimer.
Dataset is available here.
And can be used with:
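```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/aimer")
```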
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Aimer's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
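```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/aimer')
generator("I am", num_return_sequences=5)
```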
Or with the Transformers library:
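```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/aimer")
model = AutoModelWithLMHead.from_pretrained("huggingartists/aimer")
```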
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Alan Walker</div>
<a href="https://genius.com/artists/alan-walker">
<div style="text-align: center; font-size: 14px;">@alan-walker</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Alan Walker.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/alan-walker).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/alan-walker")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3oyxxcos/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Alan Walker's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/huoxll6m) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/huoxll6m/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/alan-walker')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/alan-walker")
model = AutoModelWithLMHead.from_pretrained("huggingartists/alan-walker")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/alan-walker"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/alan-walker
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/alan-walker",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/alan-walker #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Alan Walker</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@alan-walker</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Alan Walker.
Dataset is available here.
And can be used with:
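```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/alan-walker")
```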
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Alan Walker's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
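```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/alan-walker')
generator("I am", num_return_sequences=5)
```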
Or with the Transformers library:
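```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/alan-walker")
model = AutoModelWithLMHead.from_pretrained("huggingartists/alan-walker")
```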
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">André 3000</div>
<a href="https://genius.com/artists/andre-3000">
<div style="text-align: center; font-size: 14px;">@andre-3000</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from André 3000.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/andre-3000).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/andre-3000")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2hnhboqf/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on André 3000's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1mydp6nh) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1mydp6nh/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/andre-3000')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/andre-3000")
model = AutoModelWithLMHead.from_pretrained("huggingartists/andre-3000")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/andre-3000"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/andre-3000
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/andre-3000",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/andre-3000 #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">André 3000</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@andre-3000</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from André 3000.
Dataset is available here.
And can be used with:
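```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/andre-3000")
```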
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on André 3000's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
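```python
from transformers import pipeline

generator = pipeline('text-generation',
                     model='huggingartists/andre-3000')
generator("I am", num_return_sequences=5)
```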
Or with the Transformers library:
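```python
from transformers import AutoTokenizer, AutoModelWithLMHead

tokenizer = AutoTokenizer.from_pretrained("huggingartists/andre-3000")
model = AutoModelWithLMHead.from_pretrained("huggingartists/andre-3000")
```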
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Arash</div>
<a href="https://genius.com/artists/arash">
<div style="text-align: center; font-size: 14px;">@arash</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Arash.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/arash).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/arash")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/27u6df87/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Arash's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3eav8xpf) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3eav8xpf/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/arash')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/arash")
model = AutoModelWithLMHead.from_pretrained("huggingartists/arash")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/arash"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/arash
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/arash",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/arash #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Arash</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@arash</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Arash.
Dataset is available here.
And can be used with:
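```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/arash")
```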
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Arash's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
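```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/arash')
generator("I am", num_return_sequences=5)
```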
Or with the Transformers library:
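```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/arash")
model = AutoModelWithLMHead.from_pretrained("huggingartists/arash")
```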
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Architects</div>
<a href="https://genius.com/artists/architects">
<div style="text-align: center; font-size: 14px;">@architects</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Architects.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/architects).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/architects")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/licizuue/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Architects' lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1a9mrzf8) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1a9mrzf8/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/architects')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/architects")
model = AutoModelWithLMHead.from_pretrained("huggingartists/architects")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/architects"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/architects
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/architects",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/architects #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Architects</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@architects</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Architects.
Dataset is available here.
And can be used with:
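```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/architects")
```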
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Architects' lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
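```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/architects')
generator("I am", num_return_sequences=5)
```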
Or with the Transformers library:
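```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/architects")
model = AutoModelWithLMHead.from_pretrained("huggingartists/architects")
```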
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Arctic Monkeys</div>
<a href="https://genius.com/artists/arctic-monkeys">
<div style="text-align: center; font-size: 14px;">@arctic-monkeys</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Arctic Monkeys.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/arctic-monkeys).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/arctic-monkeys")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1x4ii6qz/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Arctic Monkeys' lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/bmnqvn53) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/bmnqvn53/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/arctic-monkeys')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/arctic-monkeys")
model = AutoModelWithLMHead.from_pretrained("huggingartists/arctic-monkeys")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/arctic-monkeys"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/arctic-monkeys
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/arctic-monkeys",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/arctic-monkeys #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Arctic Monkeys</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@arctic-monkeys</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Arctic Monkeys.
Dataset is available here.
And can be used with:
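```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/arctic-monkeys")
```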
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Arctic Monkeys' lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
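```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/arctic-monkeys')
generator("I am", num_return_sequences=5)
```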
Or with the Transformers library:
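```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/arctic-monkeys")
model = AutoModelWithLMHead.from_pretrained("huggingartists/arctic-monkeys")
```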
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ariana Grande</div>
<a href="https://genius.com/artists/ariana-grande">
<div style="text-align: center; font-size: 14px;">@ariana-grande</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Ariana Grande.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/ariana-grande).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ariana-grande")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2nfg7v7i/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Ariana Grande's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3u3sn1bx) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3u3sn1bx/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ariana-grande')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ariana-grande")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ariana-grande")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ariana-grande"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ariana-grande
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ariana-grande",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ariana-grande #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ariana Grande</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ariana-grande</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Ariana Grande.
Dataset is available here.
And can be used with:
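```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/ariana-grande")
```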
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Ariana Grande's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
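```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/ariana-grande')
generator("I am", num_return_sequences=5)
```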
Or with the Transformers library:
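```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ariana-grande")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ariana-grande")
```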
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ария (Ariya)</div>
<a href="https://genius.com/artists/ariya">
<div style="text-align: center; font-size: 14px;">@ariya</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Ария (Ariya).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/ariya).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ariya")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/uo73s5z1/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Ария (Ariya)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/69c1r7ea) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/69c1r7ea/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ariya')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ariya")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ariya")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ariya"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ariya
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ariya",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ariya #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ария (Ariya)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ariya</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Ария (Ariya).
Dataset is available here.
And can be used with:
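```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/ariya")
```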
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Ария (Ariya)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
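```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/ariya')
generator("I am", num_return_sequences=5)
```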
Or with the Transformers library:
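```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ariya")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ariya")
```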
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Armin van Buuren</div>
<a href="https://genius.com/artists/armin-van-buuren">
<div style="text-align: center; font-size: 14px;">@armin-van-buuren</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Armin van Buuren.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/armin-van-buuren).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/armin-van-buuren")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/hrrfc55y/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Armin van Buuren's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3q93rwo8) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3q93rwo8/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/armin-van-buuren')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/armin-van-buuren")
model = AutoModelWithLMHead.from_pretrained("huggingartists/armin-van-buuren")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/armin-van-buuren"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/armin-van-buuren
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/armin-van-buuren",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/armin-van-buuren #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Armin van Buuren</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@armin-van-buuren</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Armin van Buuren.
Dataset is available here.
And can be used with:
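```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/armin-van-buuren")
```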
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Armin van Buuren's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
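```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/armin-van-buuren')
generator("I am", num_return_sequences=5)
```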
Or with the Transformers library:
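```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/armin-van-buuren")
model = AutoModelWithLMHead.from_pretrained("huggingartists/armin-van-buuren")
```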
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">As I Lay Dying</div>
<a href="https://genius.com/artists/as-i-lay-dying">
<div style="text-align: center; font-size: 14px;">@as-i-lay-dying</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from As I Lay Dying.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/as-i-lay-dying).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/as-i-lay-dying")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2zq9ub8b/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on As I Lay Dying's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/cjg5ac7f) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/cjg5ac7f/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/as-i-lay-dying')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/as-i-lay-dying")
model = AutoModelWithLMHead.from_pretrained("huggingartists/as-i-lay-dying")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/as-i-lay-dying"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/as-i-lay-dying
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/as-i-lay-dying",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/as-i-lay-dying #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">As I Lay Dying</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@as-i-lay-dying</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from As I Lay Dying.
Dataset is available here.
And can be used with:
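```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/as-i-lay-dying")
```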
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on As I Lay Dying's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
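```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/as-i-lay-dying')
generator("I am", num_return_sequences=5)
```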
Or with the Transformers library:
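```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/as-i-lay-dying")
model = AutoModelWithLMHead.from_pretrained("huggingartists/as-i-lay-dying")
```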
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BAKLAN</div>
<a href="https://genius.com/artists/baklan">
<div style="text-align: center; font-size: 14px;">@baklan</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from BAKLAN.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/baklan).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/baklan")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2k5w5yhe/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on BAKLAN's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/28fvfef4) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/28fvfef4/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/baklan')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/baklan")
model = AutoModelWithLMHead.from_pretrained("huggingartists/baklan")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/baklan"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/baklan
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/baklan",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/baklan #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BAKLAN</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@baklan</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from BAKLAN.
Dataset is available here.
And can be used with:
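```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/baklan")
```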
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on BAKLAN's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
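```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/baklan')
generator("I am", num_return_sequences=5)
```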
Or with the Transformers library:
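```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/baklan")
model = AutoModelWithLMHead.from_pretrained("huggingartists/baklan")
```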
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Big Baby Tape</div>
<a href="https://genius.com/artists/big-baby-tape">
<div style="text-align: center; font-size: 14px;">@big-baby-tape</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Big Baby Tape.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/big-baby-tape).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/big-baby-tape")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1mu9ki6z/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Big Baby Tape's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/30qklxvh) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/30qklxvh/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/big-baby-tape')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/big-baby-tape")
model = AutoModelWithLMHead.from_pretrained("huggingartists/big-baby-tape")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/big-baby-tape"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/big-baby-tape
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/big-baby-tape",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/big-baby-tape #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Big Baby Tape</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@big-baby-tape</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Big Baby Tape.
Dataset is available here.
And can be used with:
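```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/big-baby-tape")
```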
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Big Baby Tape's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
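```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/big-baby-tape')
generator("I am", num_return_sequences=5)
```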
Or with the Transformers library:
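```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/big-baby-tape")
model = AutoModelWithLMHead.from_pretrained("huggingartists/big-baby-tape")
```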
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Big Russian Boss</div>
<a href="https://genius.com/artists/big-russian-boss">
<div style="text-align: center; font-size: 14px;">@big-russian-boss</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Big Russian Boss.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/big-russian-boss).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/big-russian-boss")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1ju9bqqi/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Big Russian Boss's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3820n7qx) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3820n7qx/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/big-russian-boss')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/big-russian-boss")
model = AutoModelWithLMHead.from_pretrained("huggingartists/big-russian-boss")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/big-russian-boss"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/big-russian-boss
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/big-russian-boss",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/big-russian-boss #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Big Russian Boss</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@big-russian-boss</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Big Russian Boss.
Dataset is available here.
And can be used with:
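```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune the model
dataset = load_dataset("huggingartists/big-russian-boss")
```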
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Big Russian Boss's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
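```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/big-russian-boss')
generator("I am", num_return_sequences=5)
```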
Or with the Transformers library:
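```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/big-russian-boss")
model = AutoModelWithLMHead.from_pretrained("huggingartists/big-russian-boss")
```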
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('...')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bill Wurtz</div>
<a href="https://genius.com/artists/bill-wurtz">
<div style="text-align: center; font-size: 14px;">@bill-wurtz</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bill Wurtz.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bill-wurtz).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bill-wurtz")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/27ysbe74/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bill Wurtz's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2f8oa51l) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2f8oa51l/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/bill-wurtz')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bill-wurtz")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bill-wurtz")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bill-wurtz"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bill-wurtz
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bill-wurtz",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bill-wurtz #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bill Wurtz</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bill-wurtz</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bill Wurtz.
Dataset is available here.
And can be used with:
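For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bill-wurtz")
```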
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bill Wurtz's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
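A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bill-wurtz')
generator("I am", num_return_sequences=5)
```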
Or with Transformers library:
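A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bill-wurtz")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bill-wurtz")
```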
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Billie Eilish</div>
<a href="https://genius.com/artists/billie-eilish">
<div style="text-align: center; font-size: 14px;">@billie-eilish</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Billie Eilish.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/billie-eilish).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/billie-eilish")
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/billie-eilish")
model = AutoModelWithLMHead.from_pretrained("huggingartists/billie-eilish")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3l1r2mnu/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Billie Eilish's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/209kskmi) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/209kskmi/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/billie-eilish')
generator("I am", num_return_sequences=5)
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/billie-eilish"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/billie-eilish
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/billie-eilish",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/billie-eilish #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Billie Eilish</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@billie-eilish</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Billie Eilish.
Dataset is available here.
And can be used with:
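For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/billie-eilish")
```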
Or with Transformers library:
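A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/billie-eilish")
model = AutoModelWithLMHead.from_pretrained("huggingartists/billie-eilish")
```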
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Billie Eilish's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
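A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/billie-eilish')
generator("I am", num_return_sequences=5)
```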
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Billy Talent</div>
<a href="https://genius.com/artists/billy-talent">
<div style="text-align: center; font-size: 14px;">@billy-talent</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Billy Talent.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/billy-talent).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/billy-talent")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/37amfbe8/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Billy Talent's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/pyw6tj9v) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/pyw6tj9v/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/billy-talent')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/billy-talent")
model = AutoModelWithLMHead.from_pretrained("huggingartists/billy-talent")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/billy-talent"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/billy-talent
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/billy-talent",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/billy-talent #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Billy Talent</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@billy-talent</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Billy Talent.
Dataset is available here.
And can be used with:
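For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/billy-talent")
```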
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Billy Talent's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
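A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/billy-talent')
generator("I am", num_return_sequences=5)
```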
Or with Transformers library:
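A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/billy-talent")
model = AutoModelWithLMHead.from_pretrained("huggingartists/billy-talent")
```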
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bladee</div>
<a href="https://genius.com/artists/bladee">
<div style="text-align: center; font-size: 14px;">@bladee</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bladee.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bladee).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bladee")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/326nmhkf/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bladee's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/28bmutxl) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/28bmutxl/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bladee')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bladee")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bladee")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bladee"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bladee
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bladee",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bladee #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bladee</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bladee</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bladee.
Dataset is available here.
And can be used with:
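For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bladee")
```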
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bladee's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
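A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bladee')
generator("I am", num_return_sequences=5)
```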
Or with Transformers library:
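A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bladee")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bladee")
```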
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bob Dylan</div>
<a href="https://genius.com/artists/bob-dylan">
<div style="text-align: center; font-size: 14px;">@bob-dylan</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bob Dylan.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bob-dylan).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bob-dylan")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3mj0lvel/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bob Dylan's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2rt8ywgd) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2rt8ywgd/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bob-dylan')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bob-dylan")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bob-dylan")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bob-dylan"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bob-dylan
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bob-dylan",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bob-dylan #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bob Dylan</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bob-dylan</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bob Dylan.
Dataset is available here.
And can be used with:
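For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bob-dylan")
```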
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bob Dylan's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
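A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bob-dylan')
generator("I am", num_return_sequences=5)
```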
Or with Transformers library:
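A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bob-dylan")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bob-dylan")
```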
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BONES</div>
<a href="https://genius.com/artists/bones">
<div style="text-align: center; font-size: 14px;">@bones</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from BONES.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bones).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bones")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/26h7sojw/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on BONES's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1yr1mvc2) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1yr1mvc2/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bones')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bones")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bones")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bones"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bones
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bones",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bones #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BONES</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bones</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from BONES.
Dataset is available here.
And can be used with:
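For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bones")
```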
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on BONES's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
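A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bones')
generator("I am", num_return_sequences=5)
```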
Or with Transformers library:
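A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bones")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bones")
```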
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Борис Гребенщиков (Boris Grebenshikov)</div>
<a href="https://genius.com/artists/boris-grebenshikov">
<div style="text-align: center; font-size: 14px;">@boris-grebenshikov</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Борис Гребенщиков (Boris Grebenshikov).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/boris-grebenshikov).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/boris-grebenshikov")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3nb43gls/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Борис Гребенщиков (Boris Grebenshikov)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/34p8ye7k) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/34p8ye7k/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/boris-grebenshikov')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/boris-grebenshikov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/boris-grebenshikov")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/boris-grebenshikov"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/boris-grebenshikov
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/boris-grebenshikov",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/boris-grebenshikov #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Борис Гребенщиков (Boris Grebenshikov)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@boris-grebenshikov</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Борис Гребенщиков (Boris Grebenshikov).
Dataset is available here.
And can be used with:
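For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/boris-grebenshikov")
```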
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Борис Гребенщиков (Boris Grebenshikov)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
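A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/boris-grebenshikov')
generator("I am", num_return_sequences=5)
```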
Or with Transformers library:
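A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/boris-grebenshikov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/boris-grebenshikov")
```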
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bring Me The Horizon</div>
<a href="https://genius.com/artists/bring-me-the-horizon">
<div style="text-align: center; font-size: 14px;">@bring-me-the-horizon</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bring Me The Horizon.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bring-me-the-horizon).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bring-me-the-horizon")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1e9181i6/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bring Me The Horizon's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3p7pncir) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3p7pncir/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bring-me-the-horizon')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bring-me-the-horizon")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bring-me-the-horizon")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bring-me-the-horizon"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bring-me-the-horizon
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bring-me-the-horizon",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bring-me-the-horizon #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bring Me The Horizon</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bring-me-the-horizon</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bring Me The Horizon.
Dataset is available here.
And can be used with:
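For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bring-me-the-horizon")
```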
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bring Me The Horizon's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
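A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bring-me-the-horizon')
generator("I am", num_return_sequences=5)
```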
Or with Transformers library:
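A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bring-me-the-horizon")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bring-me-the-horizon")
```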
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bruce Springsteen</div>
<a href="https://genius.com/artists/bruce-springsteen">
<div style="text-align: center; font-size: 14px;">@bruce-springsteen</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bruce Springsteen.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bruce-springsteen).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bruce-springsteen")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/28yd4w57/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bruce Springsteen's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/6qq7wbab) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/6qq7wbab/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bruce-springsteen')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bruce-springsteen")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bruce-springsteen")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bruce-springsteen"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bruce-springsteen
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bruce-springsteen",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bruce-springsteen #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bruce Springsteen</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bruce-springsteen</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bruce Springsteen.
Dataset is available here.
And can be used with:
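For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bruce-springsteen")
```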
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bruce Springsteen's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
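A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bruce-springsteen')
generator("I am", num_return_sequences=5)
```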
Or with Transformers library:
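A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bruce-springsteen")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bruce-springsteen")
```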
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bryan Adams</div>
<a href="https://genius.com/artists/bryan-adams">
<div style="text-align: center; font-size: 14px;">@bryan-adams</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Bryan Adams.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bryan-adams).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bryan-adams")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/22ksbpsz/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Bryan Adams's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3b0c22fu) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3b0c22fu/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/bryan-adams')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bryan-adams")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bryan-adams")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bryan-adams"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bryan-adams
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bryan-adams",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bryan-adams #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Bryan Adams</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bryan-adams</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Bryan Adams.
Dataset is available here.
And can be used with:
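For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/bryan-adams")
```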
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Bryan Adams's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
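A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bryan-adams')
generator("I am", num_return_sequences=5)
```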
Or with Transformers library:
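A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bryan-adams")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bryan-adams")
```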
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Burzum</div>
<a href="https://genius.com/artists/burzum">
<div style="text-align: center; font-size: 14px;">@burzum</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Burzum.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/burzum).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/burzum")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/j34qgww2/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Burzum's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3579mrib) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3579mrib/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation', model='huggingartists/burzum')
generator("I am", num_return_sequences=5)
```
Or with Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/burzum")
model = AutoModelWithLMHead.from_pretrained("huggingartists/burzum")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/burzum"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/burzum
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/burzum",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/burzum #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Burzum</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@burzum</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Burzum.
Dataset is available here.
And can be used with:
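For example (mirroring the snippet from the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset used to fine-tune this model
dataset = load_dataset("huggingartists/burzum")
```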
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Burzum's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
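A minimal sketch, as in the full card above:

```python
from transformers import pipeline

# Sample five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/burzum')
generator("I am", num_return_sequences=5)
```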
Or with Transformers library:
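A minimal sketch, as in the full card above (note that AutoModelWithLMHead is deprecated in recent transformers releases; AutoModelForCausalLM is its modern replacement):

```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and fine-tuned weights directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/burzum")
model = AutoModelWithLMHead.from_pretrained("huggingartists/burzum")
```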
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL')">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BUSHIDO ZHO</div>
<a href="https://genius.com/artists/bushido-zho">
<div style="text-align: center; font-size: 14px;">@bushido-zho</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from BUSHIDO ZHO.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/bushido-zho).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/bushido-zho")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/vtfjc0qi/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on BUSHIDO ZHO's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/iwclgqsj) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/iwclgqsj/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/bushido-zho')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bushido-zho")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bushido-zho")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/bushido-zho"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/bushido-zho
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/bushido-zho",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/bushido-zho #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">BUSHIDO ZHO</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@bushido-zho</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from BUSHIDO ZHO.
Dataset is available here.
And can be used with:
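```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/bushido-zho")
```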
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on BUSHIDO ZHO's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
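```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/bushido-zho')
generator("I am", num_return_sequences=5)
```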
Or with the Transformers library:
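```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/bushido-zho")
model = AutoModelWithLMHead.from_pretrained("huggingartists/bushido-zho")
```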
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Cardi B</div>
<a href="https://genius.com/artists/cardi-b">
<div style="text-align: center; font-size: 14px;">@cardi-b</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Cardi B.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/cardi-b).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/cardi-b")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2794795e/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Cardi B's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1buiv5nf) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1buiv5nf/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/cardi-b')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/cardi-b")
model = AutoModelWithLMHead.from_pretrained("huggingartists/cardi-b")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/cardi-b"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/cardi-b
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/cardi-b",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/cardi-b #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Cardi B</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@cardi-b</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Cardi B.
Dataset is available here.
And can be used with:
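```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/cardi-b")
```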
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Cardi B's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
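```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/cardi-b')
generator("I am", num_return_sequences=5)
```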
Or with the Transformers library:
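```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/cardi-b")
model = AutoModelWithLMHead.from_pretrained("huggingartists/cardi-b")
```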
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Chester Bennington</div>
<a href="https://genius.com/artists/chester-bennington">
<div style="text-align: center; font-size: 14px;">@chester-bennington</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Chester Bennington.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/chester-bennington).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/chester-bennington")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3pq3bd6d/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Chester Bennington's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1sxpshrc) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1sxpshrc/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/chester-bennington')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/chester-bennington")
model = AutoModelWithLMHead.from_pretrained("huggingartists/chester-bennington")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/chester-bennington"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/chester-bennington
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/chester-bennington",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/chester-bennington #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Chester Bennington</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@chester-bennington</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Chester Bennington.
Dataset is available here.
And can be used with:
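```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/chester-bennington")
```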
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Chester Bennington's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
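```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/chester-bennington')
generator("I am", num_return_sequences=5)
```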
Or with the Transformers library:
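```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/chester-bennington")
model = AutoModelWithLMHead.from_pretrained("huggingartists/chester-bennington")
```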
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Cocomelon</div>
<a href="https://genius.com/artists/cocomelon">
<div style="text-align: center; font-size: 14px;">@cocomelon</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Cocomelon.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/cocomelon).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/cocomelon")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1avk18yc/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Cocomelon's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3s0b2uix) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3s0b2uix/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/cocomelon')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/cocomelon")
model = AutoModelWithLMHead.from_pretrained("huggingartists/cocomelon")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/cocomelon"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/cocomelon
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/cocomelon",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/cocomelon #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Cocomelon</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@cocomelon</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Cocomelon.
Dataset is available here.
And can be used with:
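```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/cocomelon")
```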
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Cocomelon's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
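```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/cocomelon')
generator("I am", num_return_sequences=5)
```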
Or with the Transformers library:
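```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/cocomelon")
model = AutoModelWithLMHead.from_pretrained("huggingartists/cocomelon")
```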
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Coldplay</div>
<a href="https://genius.com/artists/coldplay">
<div style="text-align: center; font-size: 14px;">@coldplay</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Coldplay.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/coldplay).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/coldplay")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/34tqcy7u/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Coldplay's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/23h7o09h) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/23h7o09h/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/coldplay')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/coldplay")
model = AutoModelWithLMHead.from_pretrained("huggingartists/coldplay")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/coldplay"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/coldplay
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/coldplay",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/coldplay #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Coldplay</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@coldplay</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Coldplay.
Dataset is available here.
And can be used with:
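```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/coldplay")
```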
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Coldplay's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
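```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/coldplay')
generator("I am", num_return_sequences=5)
```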
Or with the Transformers library:
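```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/coldplay")
model = AutoModelWithLMHead.from_pretrained("huggingartists/coldplay")
```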
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DaBaby</div>
<a href="https://genius.com/artists/dababy">
<div style="text-align: center; font-size: 14px;">@dababy</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from DaBaby.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/dababy).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/dababy")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/qnkumvdw/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on DaBaby's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/24o367up) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/24o367up/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/dababy')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dababy")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dababy")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/dababy"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/dababy
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/dababy",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/dababy #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DaBaby</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@dababy</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from DaBaby.
Dataset is available here.
And can be used with:
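```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/dababy")
```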
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on DaBaby's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
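```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/dababy')
generator("I am", num_return_sequences=5)
```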
Or with the Transformers library:
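```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dababy")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dababy")
```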
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DDT</div>
<a href="https://genius.com/artists/ddt">
<div style="text-align: center; font-size: 14px;">@ddt</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from DDT.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/ddt).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ddt")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2t9xnx5c/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on DDT's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/33zphjtk) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/33zphjtk/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ddt')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ddt")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ddt")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ddt"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ddt
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ddt",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ddt #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DDT</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ddt</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from DDT.
Dataset is available here.
And can be used with:
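```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/ddt")
```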
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on DDT's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
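```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/ddt')
generator("I am", num_return_sequences=5)
```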
Or with the Transformers library:
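```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ddt")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ddt")
```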
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Death Grips</div>
<a href="https://genius.com/artists/death-grips">
<div style="text-align: center; font-size: 14px;">@death-grips</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Death Grips.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/death-grips).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/death-grips")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2hmeenl7/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Death Grips's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/226ak5bw) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/226ak5bw/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/death-grips')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/death-grips")
model = AutoModelWithLMHead.from_pretrained("huggingartists/death-grips")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/death-grips"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/death-grips
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/death-grips",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/death-grips #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Death Grips</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@death-grips</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Death Grips.
Dataset is available here.
And can be used with:
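```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/death-grips")
```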
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Death Grips's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
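```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/death-grips')
generator("I am", num_return_sequences=5)
```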
Or with the Transformers library:
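```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/death-grips")
model = AutoModelWithLMHead.from_pretrained("huggingartists/death-grips")
```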
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Deep Purple</div>
<a href="https://genius.com/artists/deep-purple">
<div style="text-align: center; font-size: 14px;">@deep-purple</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Deep Purple.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/deep-purple).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/deep-purple")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2sybcajo/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Deep Purple's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3evu15qv) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3evu15qv/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/deep-purple')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/deep-purple")
model = AutoModelWithLMHead.from_pretrained("huggingartists/deep-purple")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/deep-purple"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/deep-purple
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/deep-purple",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/deep-purple #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Deep Purple</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@deep-purple</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Deep Purple.
Dataset is available here.
And can be used with:
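```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/deep-purple")
```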
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Deep Purple's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
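```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/deep-purple')
generator("I am", num_return_sequences=5)
```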
Or with the Transformers library:
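```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/deep-purple")
model = AutoModelWithLMHead.from_pretrained("huggingartists/deep-purple")
```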
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DenDerty</div>
<a href="https://genius.com/artists/denderty">
<div style="text-align: center; font-size: 14px;">@denderty</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from DenDerty.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/denderty).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/denderty")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/gu1nyrga/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on DenDerty's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2hx5b1gk) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2hx5b1gk/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/denderty')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/denderty")
model = AutoModelWithLMHead.from_pretrained("huggingartists/denderty")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/denderty"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/denderty
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/denderty",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/denderty #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DenDerty</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@denderty</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from DenDerty.
Dataset is available here.
And can be used with:
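```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/denderty")
```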
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on DenDerty's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
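```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/denderty')
generator("I am", num_return_sequences=5)
```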
Or with the Transformers library:
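```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/denderty")
model = AutoModelWithLMHead.from_pretrained("huggingartists/denderty")
```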
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DJ Artem Artemov</div>
<a href="https://genius.com/artists/dj-artem-artemov">
<div style="text-align: center; font-size: 14px;">@dj-artem-artemov</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from DJ Artem Artemov.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/dj-artem-artemov).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/dj-artem-artemov")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2yaf9hon/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on DJ Artem Artemov's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/crwya5am) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/crwya5am/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/dj-artem-artemov')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dj-artem-artemov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dj-artem-artemov")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/dj-artem-artemov"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/dj-artem-artemov
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/dj-artem-artemov",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/dj-artem-artemov #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">DJ Artem Artemov</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@dj-artem-artemov</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from DJ Artem Artemov.
Dataset is available here.
And can be used with:
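```python
from datasets import load_dataset

# load the artist's lyrics dataset from the Hugging Face Hub
dataset = load_dataset("huggingartists/dj-artem-artemov")
```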
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on DJ Artem Artemov's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
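```python
from transformers import pipeline

# generate five continuations of the prompt "I am"
generator = pipeline('text-generation', model='huggingartists/dj-artem-artemov')
generator("I am", num_return_sequences=5)
```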
Or with the Transformers library:
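```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# AutoModelWithLMHead is deprecated upstream; AutoModelForCausalLM is the modern equivalent
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dj-artem-artemov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dj-artem-artemov")
```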
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div class="inline-flex flex-col" style="line-height: 1.5;">
    <div class="flex">
        <div
            style="margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover;">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Doja Cat</div>
<a href="https://genius.com/artists/doja-cat">
<div style="text-align: center; font-size: 14px;">@doja-cat</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Doja Cat.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/doja-cat).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/doja-cat")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1qxclk1g/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Doja Cat's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2lqvdntl) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2lqvdntl/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/doja-cat')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/doja-cat")
model = AutoModelWithLMHead.from_pretrained("huggingartists/doja-cat")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/doja-cat"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/doja-cat
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/doja-cat",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/doja-cat #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Doja Cat</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@doja-cat</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Doja Cat.
Dataset is available here.
And can be used with:
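A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/doja-cat")
```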
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Doja Cat's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
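The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/doja-cat')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/doja-cat")
model = AutoModelWithLMHead.from_pretrained("huggingartists/doja-cat")
```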
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Drake</div>
<a href="https://genius.com/artists/drake">
<div style="text-align: center; font-size: 14px;">@drake</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Drake.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/drake).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/drake")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/l3lz2q80/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Drake's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/033yz8al) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/033yz8al/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/drake')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/drake")
model = AutoModelWithLMHead.from_pretrained("huggingartists/drake")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/drake"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/drake
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/drake",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/drake #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Drake</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@drake</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Drake.
Dataset is available here.
And can be used with:
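A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/drake")
```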
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Drake's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
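The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/drake')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/drake")
model = AutoModelWithLMHead.from_pretrained("huggingartists/drake")
```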
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Dua Lipa</div>
<a href="https://genius.com/artists/dua-lipa">
<div style="text-align: center; font-size: 14px;">@dua-lipa</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Dua Lipa.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/dua-lipa).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/dua-lipa")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2wxz1liw/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Dua Lipa's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3uj930yj) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3uj930yj/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/dua-lipa')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dua-lipa")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dua-lipa")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/dua-lipa"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/dua-lipa
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/dua-lipa",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/dua-lipa #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Dua Lipa</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@dua-lipa</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Dua Lipa.
Dataset is available here.
And can be used with:
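A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/dua-lipa")
```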
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Dua Lipa's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
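The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/dua-lipa')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dua-lipa")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dua-lipa")
```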
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Duran Duran</div>
<a href="https://genius.com/artists/duran-duran">
<div style="text-align: center; font-size: 14px;">@duran-duran</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Duran Duran.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/duran-duran).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/duran-duran")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/dy133fuf/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Duran Duran's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/386u7cc3) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/386u7cc3/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/duran-duran')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/duran-duran")
model = AutoModelWithLMHead.from_pretrained("huggingartists/duran-duran")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/duran-duran"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/duran-duran
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/duran-duran",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/duran-duran #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Duran Duran</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@duran-duran</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Duran Duran.
Dataset is available here.
And can be used with:
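A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/duran-duran")
```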
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Duran Duran's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
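The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/duran-duran')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/duran-duran")
model = AutoModelWithLMHead.from_pretrained("huggingartists/duran-duran")
```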
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Джизус (Dzhizus)</div>
<a href="https://genius.com/artists/dzhizus">
<div style="text-align: center; font-size: 14px;">@dzhizus</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Джизус (Dzhizus).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/dzhizus).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/dzhizus")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/35paacn1/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Джизус (Dzhizus)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1ug3yebo) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1ug3yebo/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/dzhizus')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dzhizus")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dzhizus")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/dzhizus"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/dzhizus
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/dzhizus",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/dzhizus #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Джизус (Dzhizus)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@dzhizus</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Джизус (Dzhizus).
Dataset is available here.
And can be used with:
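A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/dzhizus")
```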
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Джизус (Dzhizus)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
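The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/dzhizus')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/dzhizus")
model = AutoModelWithLMHead.from_pretrained("huggingartists/dzhizus")
```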
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ed Sheeran</div>
<a href="https://genius.com/artists/ed-sheeran">
<div style="text-align: center; font-size: 14px;">@ed-sheeran</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Ed Sheeran.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/ed-sheeran).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ed-sheeran")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3nju68bo/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Ed Sheeran's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3hu7zc76) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3hu7zc76/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ed-sheeran')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ed-sheeran")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ed-sheeran")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ed-sheeran"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ed-sheeran
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ed-sheeran",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ed-sheeran #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ed Sheeran</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ed-sheeran</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Ed Sheeran.
Dataset is available here.
And can be used with:
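A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/ed-sheeran")
```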
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Ed Sheeran's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
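The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/ed-sheeran')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ed-sheeran")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ed-sheeran")
```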
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">ЕГОР КРИД (EGOR KREED)</div>
<a href="https://genius.com/artists/egor-kreed">
<div style="text-align: center; font-size: 14px;">@egor-kreed</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from ЕГОР КРИД (EGOR KREED).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/egor-kreed).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/egor-kreed")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3l7nf6hj/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on ЕГОР КРИД (EGOR KREED)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1mtfkshl) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1mtfkshl/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/egor-kreed')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/egor-kreed")
model = AutoModelWithLMHead.from_pretrained("huggingartists/egor-kreed")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/egor-kreed"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/egor-kreed
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/egor-kreed",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/egor-kreed #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">ЕГОР КРИД (EGOR KREED)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@egor-kreed</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from ЕГОР КРИД (EGOR KREED).
Dataset is available here.
And can be used with:
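A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/egor-kreed")
```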
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on ЕГОР КРИД (EGOR KREED)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
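The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/egor-kreed')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/egor-kreed")
model = AutoModelWithLMHead.from_pretrained("huggingartists/egor-kreed")
```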
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Егор Летов (Egor Letov)</div>
<a href="https://genius.com/artists/egor-letov">
<div style="text-align: center; font-size: 14px;">@egor-letov</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Егор Летов (Egor Letov).
Dataset is available [here](https://huggingface.co/datasets/huggingartists/egor-letov).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/egor-letov")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1omrcegx/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Егор Летов (Egor Letov)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3lk60u9h) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3lk60u9h/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/egor-letov')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/egor-letov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/egor-letov")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/egor-letov"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/egor-letov
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/egor-letov",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/egor-letov #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Егор Летов (Egor Letov)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@egor-letov</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Егор Летов (Egor Letov).
Dataset is available here.
And can be used with:
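A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/egor-letov")
```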
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Егор Летов (Egor Letov)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
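The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/egor-letov')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/egor-letov")
model = AutoModelWithLMHead.from_pretrained("huggingartists/egor-letov")
```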
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Elton John</div>
<a href="https://genius.com/artists/elton-john">
<div style="text-align: center; font-size: 14px;">@elton-john</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Elton John.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/elton-john).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/elton-john")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/188xpm2n/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Elton John's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1rgstntu) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1rgstntu/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/elton-john')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/elton-john")
model = AutoModelWithLMHead.from_pretrained("huggingartists/elton-john")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/elton-john"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/elton-john
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/elton-john",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/elton-john #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Elton John</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@elton-john</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Elton John.
Dataset is available here.
And can be used with:
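A minimal sketch of loading it with the `datasets` library (the code block was stripped from this plain rendering; it mirrors the snippet in the full card above):

```python
from datasets import load_dataset

# Load the lyrics dataset this model was fine-tuned on
dataset = load_dataset("huggingartists/elton-john")
```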
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Elton John's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
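The code blocks were stripped from this plain rendering; here is a minimal sketch of both approaches, mirroring the snippets in the full card above:

```python
from transformers import pipeline, AutoTokenizer, AutoModelWithLMHead

# High-level pipeline for text generation
generator = pipeline('text-generation', model='huggingartists/elton-john')
generator("I am", num_return_sequences=5)

# Or load the tokenizer and model directly
tokenizer = AutoTokenizer.from_pretrained("huggingartists/elton-john")
model = AutoModelWithLMHead.from_pretrained("huggingartists/elton-john")
```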
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Eminem</div>
<a href="https://genius.com/artists/eminem">
<div style="text-align: center; font-size: 14px;">@eminem</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Eminem.
Dataset is available [here](https://huggingface.co/datasets/huggingartists/eminem).
And can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/eminem")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/391kfg7f/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Eminem's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1361uz9o) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1361uz9o/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/eminem')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/eminem")
model = AutoModelWithLMHead.from_pretrained("huggingartists/eminem")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/eminem"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/eminem
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/eminem",
"autotrain_compatible",
"endpoints_compatible",
"has_space",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/eminem #autotrain_compatible #endpoints_compatible #has_space #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Eminem</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@eminem</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Eminem.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Eminem's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Enigma</div>
<a href="https://genius.com/artists/enigma">
<div style="text-align: center; font-size: 14px;">@enigma</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Enigma.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/enigma) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/enigma")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/8bx90lw6/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Enigma's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1c1t20ji) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1c1t20ji/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/enigma')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/enigma")
model = AutoModelWithLMHead.from_pretrained("huggingartists/enigma")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/enigma"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/enigma
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/enigma",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/enigma #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Enigma</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@enigma</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Enigma.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Enigma's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Enya</div>
<a href="https://genius.com/artists/enya">
<div style="text-align: center; font-size: 14px;">@enya</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Enya.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/enya) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/enya")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/16cuy8yb/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Enya's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/il8ldqo8) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/il8ldqo8/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/enya')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/enya")
model = AutoModelWithLMHead.from_pretrained("huggingartists/enya")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/enya"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/enya
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/enya",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/enya #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Enya</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@enya</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Enya.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Enya's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Epic Rap Battles of History</div>
<a href="https://genius.com/artists/epic-rap-battles-of-history">
<div style="text-align: center; font-size: 14px;">@epic-rap-battles-of-history</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Epic Rap Battles of History.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/epic-rap-battles-of-history) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/epic-rap-battles-of-history")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/ujomrrjb/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Epic Rap Battles of History's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1s03lfls) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1s03lfls/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/epic-rap-battles-of-history')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/epic-rap-battles-of-history")
model = AutoModelWithLMHead.from_pretrained("huggingartists/epic-rap-battles-of-history")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/epic-rap-battles-of-history"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/epic-rap-battles-of-history
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/epic-rap-battles-of-history",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/epic-rap-battles-of-history #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Epic Rap Battles of History</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@epic-rap-battles-of-history</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Epic Rap Battles of History.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Epic Rap Battles of History's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">FACE</div>
<a href="https://genius.com/artists/face">
<div style="text-align: center; font-size: 14px;">@face</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from FACE.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/face) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/face")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/xtozoqtm/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on FACE's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/knkqp5iy) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/knkqp5iy/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/face')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/face")
model = AutoModelWithLMHead.from_pretrained("huggingartists/face")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/face"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/face
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/face",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/face #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">FACE</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@face</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from FACE.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on FACE's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Fascinoma</div>
<a href="https://genius.com/artists/fascinoma">
<div style="text-align: center; font-size: 14px;">@fascinoma</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Fascinoma.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/fascinoma) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/fascinoma")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/za989b3u/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Fascinoma's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/kklye04t) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/kklye04t/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
                     model='huggingartists/fascinoma')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/fascinoma")
model = AutoModelWithLMHead.from_pretrained("huggingartists/fascinoma")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/fascinoma"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/fascinoma
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/fascinoma",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/fascinoma #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Fascinoma</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@fascinoma</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Fascinoma.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Fascinoma's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Fear Factory</div>
<a href="https://genius.com/artists/fear-factory">
<div style="text-align: center; font-size: 14px;">@fear-factory</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Fear Factory.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/fear-factory) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/fear-factory")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/24xjxpf5/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Fear Factory's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3gju7udi) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3gju7udi/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/fear-factory')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/fear-factory")
model = AutoModelWithLMHead.from_pretrained("huggingartists/fear-factory")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/fear-factory"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/fear-factory
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/fear-factory",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/fear-factory #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Fear Factory</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@fear-factory</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Fear Factory.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Fear Factory's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Florence + The Machine</div>
<a href="https://genius.com/artists/florence-the-machine">
<div style="text-align: center; font-size: 14px;">@florence-the-machine</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Florence + The Machine.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/florence-the-machine) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/florence-the-machine")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/icjt5evm/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Florence + The Machine's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1zfb9y24) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1zfb9y24/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/florence-the-machine')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/florence-the-machine")
model = AutoModelWithLMHead.from_pretrained("huggingartists/florence-the-machine")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/florence-the-machine"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/florence-the-machine
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/florence-the-machine",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/florence-the-machine #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Florence + The Machine</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@florence-the-machine</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Florence + The Machine.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Florence + The Machine's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ghost</div>
<a href="https://genius.com/artists/ghost">
<div style="text-align: center; font-size: 14px;">@ghost</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Ghost.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/ghost) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ghost")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1n8515nl/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Ghost's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2qimq3aa) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2qimq3aa/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ghost')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ghost")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ghost")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ghost"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ghost
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ghost",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ghost #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ghost</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ghost</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Ghost.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Ghost's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ghostemane</div>
<a href="https://genius.com/artists/ghostemane">
<div style="text-align: center; font-size: 14px;">@ghostemane</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Ghostemane.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/ghostemane) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/ghostemane")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1ou29taa/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Ghostemane's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/futdflju) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/futdflju/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/ghostemane')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/ghostemane")
model = AutoModelWithLMHead.from_pretrained("huggingartists/ghostemane")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/ghostemane"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/ghostemane
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/ghostemane",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/ghostemane #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Ghostemane</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@ghostemane</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Ghostemane.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Ghostemane's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">gizmo</div>
<a href="https://genius.com/artists/gizmo">
<div style="text-align: center; font-size: 14px;">@gizmo</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from gizmo.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/gizmo) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/gizmo")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3lolgugy/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on gizmo's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/31nxia6i) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/31nxia6i/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/gizmo')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gizmo")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gizmo")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/gizmo"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/gizmo
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/gizmo",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/gizmo #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">gizmo</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@gizmo</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from gizmo.
The dataset is available here and can be used with the datasets library.
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on gizmo's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation, or with the Transformers library.
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Gorillaz</div>
<a href="https://genius.com/artists/gorillaz">
<div style="text-align: center; font-size: 14px;">@gorillaz</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Gorillaz.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/gorillaz) and can be used with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/gorillaz")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3tuzza9u/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Gorillaz's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/12uilegj) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/12uilegj/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/gorillaz')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gorillaz")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gorillaz")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/gorillaz"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/gorillaz
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/gorillaz",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/gorillaz #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Gorillaz</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@gorillaz</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Gorillaz.
The dataset is available here and can be loaded with:
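```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/gorillaz")
```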
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Gorillaz's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
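```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/gorillaz')
generator("I am", num_return_sequences=5)
```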
Or with the Transformers library:
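```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gorillaz")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gorillaz")
```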
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Green Day</div>
<a href="https://genius.com/artists/green-day">
<div style="text-align: center; font-size: 14px;">@green-day</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Green Day.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/green-day) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/green-day")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/22eap04b/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Green Day's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/183da0m9) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/183da0m9/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/green-day')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/green-day")
model = AutoModelWithLMHead.from_pretrained("huggingartists/green-day")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/green-day"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/green-day
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/green-day",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/green-day #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Green Day</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@green-day</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Green Day.
The dataset is available here and can be loaded with:
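```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/green-day")
```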
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Green Day's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
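```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/green-day')
generator("I am", num_return_sequences=5)
```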
Or with the Transformers library:
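```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/green-day")
model = AutoModelWithLMHead.from_pretrained("huggingartists/green-day")
```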
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Григорий Лепс (Grigory Leps)</div>
<a href="https://genius.com/artists/grigory-leps">
<div style="text-align: center; font-size: 14px;">@grigory-leps</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Григорий Лепс (Grigory Leps).
The dataset is available [here](https://huggingface.co/datasets/huggingartists/grigory-leps) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/grigory-leps")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/32wqexib/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Григорий Лепс (Grigory Leps)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1j0f6nwb) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1j0f6nwb/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/grigory-leps')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/grigory-leps")
model = AutoModelWithLMHead.from_pretrained("huggingartists/grigory-leps")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/grigory-leps"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/grigory-leps
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/grigory-leps",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/grigory-leps #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Григорий Лепс (Grigory Leps)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@grigory-leps</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Григорий Лепс (Grigory Leps).
The dataset is available here and can be loaded with:
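```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/grigory-leps")
```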
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Григорий Лепс (Grigory Leps)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
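```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/grigory-leps')
generator("I am", num_return_sequences=5)
```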
Or with the Transformers library:
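```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/grigory-leps")
model = AutoModelWithLMHead.from_pretrained("huggingartists/grigory-leps")
```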
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Grimes</div>
<a href="https://genius.com/artists/grimes">
<div style="text-align: center; font-size: 14px;">@grimes</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Grimes.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/grimes) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/grimes")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3796ng30/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Grimes's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/ourv0tjj) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/ourv0tjj/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/grimes')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/grimes")
model = AutoModelWithLMHead.from_pretrained("huggingartists/grimes")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/grimes"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/grimes
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/grimes",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/grimes #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Grimes</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@grimes</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Grimes.
The dataset is available here and can be loaded with:
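```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/grimes")
```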
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Grimes's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
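```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/grimes')
generator("I am", num_return_sequences=5)
```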
Or with the Transformers library:
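```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/grimes")
model = AutoModelWithLMHead.from_pretrained("huggingartists/grimes")
```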
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">GSPD</div>
<a href="https://genius.com/artists/gspd">
<div style="text-align: center; font-size: 14px;">@gspd</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from GSPD.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/gspd) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/gspd")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3jof0sex/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on GSPD's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/2nxhrny4) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/2nxhrny4/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/gspd')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gspd")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gspd")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/gspd"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/gspd
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/gspd",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/gspd #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">GSPD</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@gspd</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from GSPD.
The dataset is available here and can be loaded with:
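```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/gspd")
```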
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on GSPD's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
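```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/gspd')
generator("I am", num_return_sequences=5)
```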
Or with the Transformers library:
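```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gspd")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gspd")
```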
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Gunna</div>
<a href="https://genius.com/artists/gunna">
<div style="text-align: center; font-size: 14px;">@gunna</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Gunna.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/gunna) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/gunna")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/vcyblers/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Gunna's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3c1xymw6) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3c1xymw6/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/gunna')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gunna")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gunna")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/gunna"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/gunna
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/gunna",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/gunna #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Gunna</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@gunna</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Gunna.
The dataset is available here and can be loaded with:
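```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/gunna")
```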
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Gunna's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
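```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/gunna')
generator("I am", num_return_sequences=5)
```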
Or with the Transformers library:
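```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/gunna")
model = AutoModelWithLMHead.from_pretrained("huggingartists/gunna")
```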
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">HyunA (현아)</div>
<a href="https://genius.com/artists/hyuna">
<div style="text-align: center; font-size: 14px;">@hyuna</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from HyunA (현아).
The dataset is available [here](https://huggingface.co/datasets/huggingartists/hyuna) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/hyuna")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/3uo94mxd/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on HyunA (현아)'s lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1o8t0mq0) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1o8t0mq0/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/hyuna')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/hyuna")
model = AutoModelWithLMHead.from_pretrained("huggingartists/hyuna")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/hyuna"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/hyuna
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/hyuna",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/hyuna #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">HyunA (현아)</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@hyuna</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from HyunA (현아).
The dataset is available here and can be loaded with:
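```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/hyuna")
```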
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on HyunA (현아)'s lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
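```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/hyuna')
generator("I am", num_return_sequences=5)
```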
Or with the Transformers library:
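```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/hyuna")
model = AutoModelWithLMHead.from_pretrained("huggingartists/hyuna")
```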
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">I DONT KNOW HOW BUT THEY FOUND ME</div>
<a href="https://genius.com/artists/i-dont-know-how-but-they-found-me">
<div style="text-align: center; font-size: 14px;">@i-dont-know-how-but-they-found-me</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from I DONT KNOW HOW BUT THEY FOUND ME.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/i-dont-know-how-but-they-found-me) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/i-dont-know-how-but-they-found-me")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/1j7uofwh/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on I DONT KNOW HOW BUT THEY FOUND ME's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/1abhthz2) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/1abhthz2/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/i-dont-know-how-but-they-found-me')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/i-dont-know-how-but-they-found-me")
model = AutoModelWithLMHead.from_pretrained("huggingartists/i-dont-know-how-but-they-found-me")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/i-dont-know-how-but-they-found-me"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/i-dont-know-how-but-they-found-me
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/i-dont-know-how-but-they-found-me",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/i-dont-know-how-but-they-found-me #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">I DONT KNOW HOW BUT THEY FOUND ME</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@i-dont-know-how-but-they-found-me</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from I DONT KNOW HOW BUT THEY FOUND ME.
The dataset is available here and can be loaded with:
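```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/i-dont-know-how-but-they-found-me")
```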
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on I DONT KNOW HOW BUT THEY FOUND ME's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
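```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/i-dont-know-how-but-they-found-me')
generator("I am", num_return_sequences=5)
```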
Or with the Transformers library:
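```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/i-dont-know-how-but-they-found-me")
model = AutoModelWithLMHead.from_pretrained("huggingartists/i-dont-know-how-but-they-found-me")
```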
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Imagine Dragons</div>
<a href="https://genius.com/artists/imagine-dragons">
<div style="text-align: center; font-size: 14px;">@imagine-dragons</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Imagine Dragons.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/imagine-dragons) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/imagine-dragons")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/dln6ixis/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Imagine Dragons's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/3cj3c8z1) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/3cj3c8z1/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/imagine-dragons')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/imagine-dragons")
model = AutoModelWithLMHead.from_pretrained("huggingartists/imagine-dragons")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/imagine-dragons"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/imagine-dragons
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/imagine-dragons",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/imagine-dragons #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Imagine Dragons</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@imagine-dragons</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Imagine Dragons.
The dataset is available here and can be loaded with:
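```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/imagine-dragons")
```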
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Imagine Dragons's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
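```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/imagine-dragons')
generator("I am", num_return_sequences=5)
```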
Or with the Transformers library:
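```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/imagine-dragons")
model = AutoModelWithLMHead.from_pretrained("huggingartists/imagine-dragons")
```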
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">John K. Samson</div>
<a href="https://genius.com/artists/john-k-samson">
<div style="text-align: center; font-size: 14px;">@john-k-samson</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from John K. Samson.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/john-k-samson) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/john-k-samson")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/2s15m338/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on John K. Samson's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/18ill893) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/18ill893/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/john-k-samson')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/john-k-samson")
model = AutoModelWithLMHead.from_pretrained("huggingartists/john-k-samson")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/john-k-samson"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/john-k-samson
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/john-k-samson",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/john-k-samson #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">John K. Samson</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@john-k-samson</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from John K. Samson.
The dataset is available here and can be loaded with:
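```python
from datasets import load_dataset

# Lyrics dataset scraped from Genius for this artist
dataset = load_dataset("huggingartists/john-k-samson")
```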
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on John K. Samson's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
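```python
from transformers import pipeline

# Generate five continuations of the prompt "I am"
generator = pipeline('text-generation',
                     model='huggingartists/john-k-samson')
generator("I am", num_return_sequences=5)
```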
Or with the Transformers library:
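```python
from transformers import AutoTokenizer, AutoModelWithLMHead

# Load the tokenizer and the fine-tuned model weights
tokenizer = AutoTokenizer.from_pretrained("huggingartists/john-k-samson")
model = AutoModelWithLMHead.from_pretrained("huggingartists/john-k-samson")
```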
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">John Lennon</div>
<a href="https://genius.com/artists/john-lennon">
<div style="text-align: center; font-size: 14px;">@john-lennon</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from John Lennon.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/john-lennon) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/john-lennon")
```
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/f3d8fseh/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on John Lennon's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/36mtogkg) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/36mtogkg/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/john-lennon')
generator("I am", num_return_sequences=5)
```
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/john-lennon")
model = AutoModelWithLMHead.from_pretrained("huggingartists/john-lennon")
```
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the data present in the artist's lyrics further affects the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/john-lennon"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/john-lennon
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/john-lennon",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/john-lennon #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">John Lennon</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@john-lennon</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from John Lennon.
The dataset is available here and can be loaded with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on John Lennon's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the artist's lyrics used for fine-tuning further shape the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n">
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800">🤖 HuggingArtists Model 🤖</div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Joji</div>
<a href="https://genius.com/artists/joji">
<div style="text-align: center; font-size: 14px;">@joji</div>
</a>
</div>
I was made with [huggingartists](https://github.com/AlekseyKorshuk/huggingartists).
Create your own bot based on your favorite artist with [the demo](https://colab.research.google.com/github/AlekseyKorshuk/huggingartists/blob/master/huggingartists-demo.ipynb)!
## How does it work?
To understand how the model was developed, check the [W&B report](https://wandb.ai/huggingartists/huggingartists/reportlist).
## Training data
The model was trained on lyrics from Joji.
The dataset is available [here](https://huggingface.co/datasets/huggingartists/joji) and can be loaded with:
```python
from datasets import load_dataset
dataset = load_dataset("huggingartists/joji")
```
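For a rough sense of corpus size, you can compute simple statistics over the loaded dataset (the `train` split and `text` column names are assumptions, as above):
```python
from datasets import load_dataset

dataset = load_dataset("huggingartists/joji")

# Word counts are a crude proxy for token counts, but enough
# to gauge how much text the model was fine-tuned on.
lengths = [len(row["text"].split()) for row in dataset["train"]]
print(f"songs: {len(lengths)}, avg words per song: {sum(lengths) / len(lengths):.1f}")
```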
[Explore the data](https://wandb.ai/huggingartists/huggingartists/runs/ns61e8zi/artifacts), which is tracked with [W&B artifacts](https://docs.wandb.com/artifacts) at every step of the pipeline.
## Training procedure
The model is based on a pre-trained [GPT-2](https://huggingface.co/gpt2) which is fine-tuned on Joji's lyrics.
Hyperparameters and metrics are recorded in the [W&B training run](https://wandb.ai/huggingartists/huggingartists/runs/jz3ft48t) for full transparency and reproducibility.
At the end of training, [the final model](https://wandb.ai/huggingartists/huggingartists/runs/jz3ft48t/artifacts) is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
```python
from transformers import pipeline
generator = pipeline('text-generation',
model='huggingartists/joji')
generator("I am", num_return_sequences=5)
```
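Sampling is random by default; to get reproducible lyrics across runs, fix the seed first (a sketch using the `set_seed` helper from Transformers):
```python
from transformers import pipeline, set_seed

generator = pipeline('text-generation', model='huggingartists/joji')

set_seed(42)  # fix the relevant RNGs so repeated runs match
print(generator("I am", max_length=40, num_return_sequences=2))
```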
Or with the Transformers library:
```python
from transformers import AutoTokenizer, AutoModelWithLMHead
tokenizer = AutoTokenizer.from_pretrained("huggingartists/joji")
model = AutoModelWithLMHead.from_pretrained("huggingartists/joji")
```
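Note that `AutoModelWithLMHead` is deprecated in recent Transformers releases; on a current version, the equivalent causal-LM loader is:
```python
from transformers import AutoTokenizer, AutoModelForCausalLM

tokenizer = AutoTokenizer.from_pretrained("huggingartists/joji")
model = AutoModelForCausalLM.from_pretrained("huggingartists/joji")
```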
## Limitations and bias
The model suffers from [the same limitations and bias as GPT-2](https://huggingface.co/gpt2#limitations-and-bias).
In addition, the artist's lyrics used for fine-tuning further shape the text generated by the model.
## About
*Built by Aleksey Korshuk*
[](https://github.com/AlekseyKorshuk)
[](https://twitter.com/intent/follow?screen_name=alekseykorshuk)
[](https://t.me/joinchat/_CQ04KjcJ-4yZTky)
For more details, visit the project repository.
[](https://github.com/AlekseyKorshuk/huggingartists)
|
{"language": "en", "tags": ["huggingartists", "lyrics", "lm-head", "causal-lm"], "datasets": ["huggingartists/joji"], "widget": [{"text": "I am"}]}
|
text-generation
|
huggingartists/joji
|
[
"transformers",
"pytorch",
"jax",
"gpt2",
"text-generation",
"huggingartists",
"lyrics",
"lm-head",
"causal-lm",
"en",
"dataset:huggingartists/joji",
"autotrain_compatible",
"endpoints_compatible",
"text-generation-inference",
"region:us"
] |
2022-03-02T23:29:05+00:00
|
[] |
[
"en"
] |
TAGS
#transformers #pytorch #jax #gpt2 #text-generation #huggingartists #lyrics #lm-head #causal-lm #en #dataset-huggingartists/joji #autotrain_compatible #endpoints_compatible #text-generation-inference #region-us
|
<div class="inline-flex flex-col" style="line-height: 1.5;">
<div class="flex">
<div
style="display:DISPLAY_1; margin-left: auto; margin-right: auto; width: 92px; height:92px; border-radius: 50%; background-size: cover; background-image: url('URL
</div>
</div>
<div style="text-align: center; margin-top: 3px; font-size: 16px; font-weight: 800"> HuggingArtists Model </div>
<div style="text-align: center; font-size: 16px; font-weight: 800">Joji</div>
<a href="URL
<div style="text-align: center; font-size: 14px;">@joji</div>
</a>
</div>
I was made with huggingartists.
Create your own bot based on your favorite artist with the demo!
## How does it work?
To understand how the model was developed, check the W&B report.
## Training data
The model was trained on lyrics from Joji.
The dataset is available here and can be loaded with:
Explore the data, which is tracked with W&B artifacts at every step of the pipeline.
## Training procedure
The model is based on a pre-trained GPT-2 which is fine-tuned on Joji's lyrics.
Hyperparameters and metrics are recorded in the W&B training run for full transparency and reproducibility.
At the end of training, the final model is logged and versioned.
## How to use
You can use this model directly with a pipeline for text generation:
Or with the Transformers library:
## Limitations and bias
The model suffers from the same limitations and bias as GPT-2.
In addition, the artist's lyrics used for fine-tuning further shape the text generated by the model.
## About
*Built by Aleksey Korshuk*

For more details, visit the project repository.
\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n\n\nFor more details, visit the project repository.\n\n![GitHub stars](URL"
] |
[
-0.014420767314732075,
0.14955991506576538,
-0.00279498309828341,
0.03305615857243538,
0.08972304314374924,
-0.006016930565237999,
0.08078042417764664,
0.11181074380874634,
-0.015103929676115513,
0.07291840761899948,
0.0807463526725769,
0.015418472699820995,
0.07071179896593094,
0.1436508744955063,
0.08550886064767838,
-0.2758246064186096,
0.03500349447131157,
-0.09936735779047012,
0.037698276340961456,
0.12016523629426956,
0.0946367010474205,
-0.0527837872505188,
0.08811429888010025,
0.03677014634013176,
-0.07868153601884842,
0.033927615731954575,
-0.020310455933213234,
-0.07076551020145416,
0.08776519447565079,
0.07872900366783142,
0.025065278634428978,
0.03576643764972687,
0.06775622814893723,
-0.19675536453723907,
0.035338420420885086,
0.12369301170110703,
0.02875053882598877,
0.072483591735363,
0.04566088318824768,
-0.07366550713777542,
0.1733744889497757,
-0.028513113036751747,
0.08460894972085953,
0.049139540642499924,
-0.1101161539554596,
-0.17940394580364227,
-0.12216851115226746,
0.07575420290231705,
0.10471469908952713,
0.07307559251785278,
-0.035463299602270126,
0.03999850153923035,
0.0022839659359306097,
0.046010326594114304,
0.25532275438308716,
-0.2397315502166748,
-0.01755913347005844,
0.08981824666261673,
0.04949471354484558,
0.04450960457324982,
-0.08122037351131439,
0.01705995947122574,
0.05879422649741173,
0.022629426792263985,
0.04698804393410683,
-0.018660856410861015,
0.21257369220256805,
0.025250384584069252,
-0.09286338835954666,
-0.07793205231428146,
0.11931297183036804,
-0.029263922944664955,
-0.08024520426988602,
-0.1560557335615158,
0.0027605306822806597,
-0.03249596431851387,
0.04064948856830597,
-0.017510725185275078,
-0.003789856331422925,
-0.0007525914697907865,
-0.02887619473040104,
-0.09354299306869507,
-0.0907747745513916,
-0.03166558966040611,
-0.024881834164261818,
0.05559997260570526,
0.027338147163391113,
0.026336736977100372,
-0.07859030365943909,
0.23302961885929108,
0.0036609761882573366,
-0.10784938186407089,
-0.0575522780418396,
-0.09496166557073593,
-0.09430059790611267,
-0.05925062298774719,
0.012331396341323853,
0.01126464456319809,
-0.06027388200163841,
0.16246634721755981,
-0.024310506880283356,
0.028192395344376564,
-0.00582378963008523,
-0.023045336827635765,
0.1594722419977188,
0.1299722045660019,
-0.09764757007360458,
-0.03711893782019615,
0.044585421681404114,
-0.011295412667095661,
-0.07224706560373306,
-0.05788310244679451,
-0.01078089326620102,
-0.02918067015707493,
0.027901696041226387,
0.09714015573263168,
0.041659560054540634,
0.05669568106532097,
0.033081650733947754,
-0.05120052769780159,
0.1088399663567543,
-0.14808861911296844,
0.00865138042718172,
-0.009979900903999805,
-0.03352293744683266,
0.02622104249894619,
0.051174942404031754,
0.014354433864355087,
-0.10082846879959106,
0.09980260580778122,
-0.0494341142475605,
-0.05104702711105347,
-0.06593186408281326,
-0.08651399612426758,
-0.00456831743940711,
-0.010416056029498577,
-0.04712514206767082,
-0.08927136659622192,
-0.1580812782049179,
-0.0356098972260952,
0.019738102331757545,
-0.04503887891769409,
-0.03991391882300377,
0.03881971910595894,
-0.03400234505534172,
-0.0025182468816637993,
-0.019267207011580467,
-0.011752829886972904,
-0.032372232526540756,
0.022801658138632774,
-0.05371028557419777,
0.04128968343138695,
0.09032460302114487,
0.03581605851650238,
-0.10885236412286758,
0.06685786694288254,
-0.14964257180690765,
0.1387464553117752,
-0.00809024553745985,
0.014789148233830929,
-0.11477183550596237,
-0.09330591559410095,
-0.02145528979599476,
-0.027162956073880196,
-0.009884784929454327,
0.09738191217184067,
-0.18828175961971283,
-0.03858804702758789,
0.20161302387714386,
-0.0776909738779068,
-0.07979831099510193,
0.07266182452440262,
-0.07515714317560196,
0.04914286360144615,
0.14085634052753448,
0.05820842087268829,
0.15220852196216583,
-0.11651774495840073,
-0.06261282414197922,
-0.04463912546634674,
-0.057525407522916794,
0.22862136363983154,
0.05406929925084114,
-0.004634538199752569,
0.024456143379211426,
0.016403304412961006,
-0.019849995151162148,
-0.031216925010085106,
-0.022875109687447548,
-0.04218139126896858,
-0.009148984216153622,
0.014941990375518799,
-0.007423962000757456,
-0.046292126178741455,
-0.06615389138460159,
-0.014052028767764568,
-0.10646823048591614,
0.04516894742846489,
0.10476475954055786,
-0.07360127568244934,
0.012470673769712448,
-0.0885954424738884,
-0.014958438463509083,
-0.04660632088780403,
0.0237702876329422,
-0.18697799742221832,
-0.046727657318115234,
0.024496478959918022,
-0.07417237758636475,
0.07667792588472366,
0.03644740581512451,
0.038900986313819885,
0.06419071555137634,
-0.010404281318187714,
0.023999882861971855,
-0.0469573438167572,
-0.019252173602581024,
-0.03270832076668739,
-0.14063997566699982,
-0.07285372167825699,
-0.056132227182388306,
0.08012717217206955,
-0.14370204508304596,
0.006424622610211372,
0.09800741076469421,
0.116990827023983,
0.02912968210875988,
-0.055103600025177,
0.016901852563023567,
-0.03487393260002136,
-0.03997159004211426,
-0.11312785744667053,
-0.06267306208610535,
0.001988342497497797,
-0.03546644374728203,
0.1665940135717392,
-0.1722286343574524,
-0.0642622783780098,
0.0925813838839531,
0.1666399985551834,
-0.10373464971780777,
0.013818259350955486,
-0.09500166028738022,
-0.01583460532128811,
-0.05055752396583557,
-0.03933661803603172,
0.2557544708251953,
0.03625473007559776,
0.08040978014469147,
-0.11455333977937698,
-0.10170894861221313,
0.00026152608916163445,
-0.04238216578960419,
-0.02948175184428692,
0.03521619364619255,
0.018529677763581276,
-0.17891646921634674,
0.026269955560564995,
-0.00971301831305027,
0.11650881171226501,
0.22584553062915802,
0.0515323281288147,
-0.10208723694086075,
-0.06259621679782867,
-0.08655243366956711,
0.004026412032544613,
0.044345322996377945,
0.03410796448588371,
0.025450611487030983,
0.045731157064437866,
0.0530114509165287,
0.037925902754068375,
-0.10457310825586319,
0.00966944731771946,
0.07665619999170303,
-0.048378270119428635,
-0.046310245990753174,
0.026923565194010735,
0.017112748697400093,
0.08166218549013138,
0.07707011699676514,
0.14966849982738495,
-0.074494369328022,
-0.04703301191329956,
-0.1391022950410843,
0.13422678411006927,
-0.07752520591020584,
-0.26132145524024963,
-0.13504596054553986,
-0.07359681278467178,
0.02297654189169407,
0.005021914839744568,
0.04433825984597206,
-0.06595345586538315,
-0.04125519096851349,
-0.10668391734361649,
0.08590617030858994,
-0.04682764410972595,
-0.016978872939944267,
0.00015331928443629295,
0.022186174988746643,
-0.03328925371170044,
-0.10859352350234985,
-0.026503542438149452,
0.04234312102198601,
-0.11127155274152756,
-0.02099338360130787,
0.018892450258135796,
0.04126814380288124,
0.14538462460041046,
-0.0035917973145842552,
-0.003064891090616584,
-0.033123910427093506,
0.274029016494751,
-0.12029818445444107,
0.06953871250152588,
0.17055468261241913,
-0.020942645147442818,
0.04798973724246025,
0.07990771532058716,
-0.0003717032668646425,
-0.06920214742422104,
0.07817674428224564,
0.07339277863502502,
-0.08899581432342529,
-0.22043342888355255,
-0.01736350730061531,
-0.007359666284173727,
0.03200751915574074,
0.1298372894525528,
0.061051737517118454,
0.04940079525113106,
-0.008692645467817783,
-0.1072193905711174,
0.04889697954058647,
0.03731340169906616,
0.10862534493207932,
-0.082133948802948,
-0.01187954843044281,
0.04436817392706871,
-0.06741941720247269,
0.0290722344070673,
0.13862530887126923,
0.04740184172987938,
0.19251108169555664,
-0.060568343847990036,
0.10843075066804886,
0.08135636895895004,
0.10420625656843185,
0.037744540721178055,
0.007008019834756851,
-0.002003288362175226,
0.011305879801511765,
-0.0028757744003087282,
-0.09865087270736694,
-0.008579595945775509,
0.04767923057079315,
0.028808334842324257,
-0.016591159626841545,
-0.039286140352487564,
-0.05524665117263794,
0.04583713784813881,
0.22552470862865448,
-0.018713457509875298,
-0.18062905967235565,
-0.10926702618598938,
0.04500142112374306,
-0.08717260509729385,
-0.05610215291380882,
-0.024510329589247704,
0.08296937495470047,
-0.21953774988651276,
0.07092064619064331,
-0.0316084660589695,
0.11019744724035263,
-0.11805522441864014,
0.0022694054059684277,
0.08207186311483383,
0.044065237045288086,
-0.062054913491010666,
0.09770432859659195,
-0.17003963887691498,
0.05514572933316231,
-0.010102338157594204,
0.07225894927978516,
-0.07236208766698837,
0.02939077652990818,
-0.002659639110788703,
0.05835171043872833,
0.0875193253159523,
0.009051092900335789,
0.034055374562740326,
0.0028863337356597185,
-0.043255507946014404,
0.014719538390636444,
0.050868913531303406,
-0.1326693743467331,
0.12291491031646729,
-0.02669435180723667,
0.034374069422483444,
-0.0523936003446579,
-0.0879112109541893,
-0.09593594819307327,
-0.16920065879821777,
0.09372886270284653,
-0.13085971772670746,
0.01582942344248295,
-0.06995502859354019,
-0.03085598163306713,
0.04153957590460777,
0.26309967041015625,
-0.060091957449913025,
-0.06905967742204666,
-0.1400875747203827,
0.021778671070933342,
0.14460980892181396,
-0.08387241512537003,
0.0019417103612795472,
-0.009098007343709469,
0.21877418458461761,
-0.005234365817159414,
-0.12223585695028305,
-0.02556784637272358,
-0.057748809456825256,
-0.17395275831222534,
-0.004989281762391329,
0.16241247951984406,
0.06650210171937943,
0.0214697178453207,
0.015703732147812843,
-0.01922924444079399,
-0.045527439564466476,
-0.16595904529094696,
0.020908398553729057,
0.17505063116550446,
-0.008916999213397503,
-0.012132962234318256,
0.0486045777797699,
0.009951826184988022,
-0.13267318904399872,
0.009118810296058655,
0.042497653514146805,
0.1770728975534439,
-0.07354040443897247,
0.19140742719173431,
0.04430937394499779,
-0.09829049557447433,
-0.1447727084159851,
0.002618390368297696,
0.04000231251120567,
0.039967261254787445,
0.04734368249773979,
-0.2011362910270691,
0.037154678255319595,
0.04266238585114479,
0.006264259573072195,
0.03264530375599861,
-0.3372833728790283,
-0.15743488073349,
-0.012578030116856098,
0.004927125759422779,
-0.1449679285287857,
-0.04520346596837044,
-0.036191824823617935,
-0.09981926530599594,
-0.26342782378196716,
0.0940096378326416,
-0.12131955474615097,
0.07618512958288193,
0.02839350700378418,
0.09298452734947205,
0.0437658317387104,
-0.042915161699056625,
0.13964253664016724,
-0.018335435539484024,
0.06901860982179642,
-0.09572508186101913,
-0.06299882382154465,
0.06520060449838638,
-0.074666827917099,
0.09456942230463028,
0.02864960767328739,
0.07533036917448044,
-0.09949766844511032,
-0.09236546605825424,
-0.06610842794179916,
0.0020796465687453747,
-0.05200773850083351,
-0.08854184299707413,
-0.08612992614507675,
0.08525332063436508,
0.12201675772666931,
-0.04454541578888893,
-0.08137961477041245,
-0.07902989536523819,
0.004731908906251192,
0.04954879358410835,
0.12084919214248657,
0.08214697241783142,
-0.047533657401800156,
-0.0014457963407039642,
0.01913708634674549,
0.015010532923042774,
-0.18400882184505463,
0.05004929006099701,
0.0883583128452301,
0.03543377295136452,
0.10956615954637527,
-0.0003310272004455328,
-0.17110300064086914,
0.010724980384111404,
0.05002136901021004,
-0.16531501710414886,
-0.11522772908210754,
-0.03798237815499306,
0.020940350368618965,
-0.09691402316093445,
-0.04557633772492409,
0.13740624487400055,
-0.03746919706463814,
-0.04177705943584442,
0.004662640858441591,
0.04383154585957527,
-0.04232127591967583,
0.08354833722114563,
-0.012701542116701603,
0.04211323335766792,
-0.06990614533424377,
0.11802363395690918,
0.06922324746847153,
0.010774608701467514,
0.038977380841970444,
0.07163936644792557,
-0.09027308970689774,
0.0164062287658453,
-0.10086842626333237,
0.015464934520423412,
-0.009498325176537037,
-0.013882234692573547,
0.02549414336681366,
-0.03119651786983013,
0.046932756900787354,
0.09642583876848221,
-0.016422497108578682,
0.10553298145532608,
-0.04243043437600136,
0.022385209798812866,
-0.13455301523208618,
0.0701649859547615,
0.0383342020213604,
0.02079596370458603,
-0.10260520130395889,
0.20320595800876617,
0.0345783568918705,
0.1070149764418602,
-0.036443885415792465,
-0.06400761753320694,
-0.05071417614817619,
-0.012980015017092228,
-0.07829567044973373,
-0.03564685210585594,
-0.09180653840303421,
-0.023496756330132484,
-0.006689597386866808,
-0.032993242144584656,
-0.03530311584472656,
0.04473519325256348,
-0.03144185245037079,
-0.060027703642845154,
-0.07792439311742783,
0.055384863168001175,
-0.13984453678131104,
0.03650374338030815,
0.11713520437479019,
-0.05600891634821892,
0.12328022718429565,
0.04851750656962395,
-0.03856608644127846,
0.027703488245606422,
-0.13890276849269867,
0.04686417803168297,
-0.0018998117884621024,
0.015641577541828156,
0.019581085070967674,
-0.14958451688289642,
0.003996535670012236,
-0.03511761501431465,
-0.06881595402956009,
0.0017601681174710393,
-0.015914877876639366,
-0.13490311801433563,
-0.012631584890186787,
0.08682078123092651,
-0.01120564341545105,
-0.06779581308364868,
0.072960264980793,
0.056369129568338394,
0.02393684722483158,
0.04989929124712944,
-0.01326617132872343,
0.07025045901536942,
-0.17340554296970367,
-0.06293875724077225,
-0.0015471155056729913,
0.0233584176748991,
0.05017278715968132,
-0.02502060867846012,
0.0346364788711071,
-0.018605733290314674,
0.20791147649288177,
0.018805785104632378,
-0.010005667805671692,
0.03688595071434975,
-0.08486538380384445,
-0.017471464350819588,
0.04454875364899635,
0.077609121799469,
-0.019670404493808746,
-0.024747982621192932,
0.000980598502792418,
-0.024460673332214355,
-0.09707757830619812,
-0.017909936606884003,
0.08669853210449219,
0.017295846715569496,
0.21036092936992645,
-0.05265999212861061,
0.059008289128541946,
-0.01788197085261345,
-0.09542598575353622,
-0.021310707554221153,
-0.04610031843185425,
0.032856088131666183,
-0.05671006441116333,
0.04473917558789253,
0.20464164018630981,
-0.15180277824401855,
0.11084693670272827,
0.045799076557159424,
-0.05596369504928589,
-0.12256783246994019,
-0.2019311636686325,
-0.016952328383922577,
-0.03269416466355324,
0.026741482317447662,
-0.13890816271305084,
0.10449853539466858,
0.014921333640813828,
0.0418180413544178,
-0.0610760860145092,
0.13106761872768402,
-0.06670675426721573,
-0.14244365692138672,
0.047614097595214844,
0.012993283569812775,
0.0304428543895483,
0.036958206444978714,
0.08991795033216476,
0.04147251322865486,
0.004128364380449057,
0.06840098649263382,
0.044038861989974976,
0.033787086606025696,
0.03576454892754555,
-0.03528674691915512,
-0.04460582137107849,
0.03056490235030651,
-0.004761684685945511,
0.019401410594582558,
0.09629692882299423,
0.0701717734336853,
-0.024447374045848846,
-0.012906636111438274,
0.31022393703460693,
-0.012949693016707897,
-0.026972392573952675,
-0.19101591408252716,
0.14322765171527863,
0.02144406922161579,
0.002530408790335059,
0.020280690863728523,
-0.11276662349700928,
0.01196106243878603,
0.11943548172712326,
0.13699640333652496,
-0.010476010851562023,
0.022346220910549164,
-0.024616708979010582,
0.018524272367358208,
0.03907421603798866,
0.10290825366973877,
0.05870482698082924,
0.18345379829406738,
-0.02425834722816944,
0.06313455104827881,
-0.010216457769274712,
-0.024647898972034454,
0.012164294719696045,
0.10549187660217285,
-0.04038774594664574,
0.007348013576120138,
-0.05019121244549751,
0.09884250909090042,
-0.05537095293402672,
-0.3066984713077545,
-0.03489973023533821,
-0.009525884874165058,
-0.09023068100214005,
0.07289635390043259,
-0.023314831778407097,
-0.020473787561058998,
0.07769792526960373,
0.030152032151818275,
-0.04487147927284241,
0.17007958889007568,
0.0516543872654438,
-0.03400209918618202,
-0.0054749795235693455,
0.10776317119598389,
-0.04547278583049774,
0.1591498851776123,
-0.036558687686920166,
0.0005780919454991817,
0.07084336131811142,
0.008067135699093342,
-0.1318938285112381,
0.008492019958794117,
0.035455550998449326,
-0.049251314252614975,
-0.022323518991470337,
0.2090313881635666,
0.011572275310754776,
0.029063180088996887,
0.071653813123703,
-0.0532873272895813,
0.01885838806629181,
-0.045997049659490585,
0.05635663866996765,
-0.13317835330963135,
0.0719463899731636,
-0.07264396548271179,
0.11553129553794861,
0.17779852449893951,
-0.06817402690649033,
0.03860935568809509,
-0.06314870715141296,
0.01662272773683071,
-0.031536590307950974,
0.06991857290267944,
-0.016786789521574974,
-0.10825547575950623,
0.002090692752972245,
0.03983011469244957,
0.017865734174847603,
-0.1624288111925125,
-0.07627397030591965,
0.07178458571434021,
-0.05762307345867157,
0.020935675129294395,
0.17728911340236664,
0.01263489294797182,
0.05472204089164734,
-0.036432284861803055,
-0.03830314800143242,
-0.00016165773558896035,
0.11845237016677856,
-0.1777312010526657,
-0.0806475356221199
] |