Commit c4a09f6
Parent(s): 513dbef
Update parquet files (step 61 of 476)
This view is limited to 50 files because it contains too many changes. See raw diff.
- spaces/101-5/gpt4free/g4f/.v1/testing/usesless_test.py +0 -13
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/Anthony Hamilton The Point Of It All Full Album Zip.md +0 -22
- spaces/1gistliPinn/ChatGPT4/Examples/Autodesk 2016 All Products Patch Keygen -X-Force Zip _BEST_.md +0 -7
- spaces/1gistliPinn/ChatGPT4/Examples/Crack File For Sap 2000 V15 16 Fix.md +0 -7
- spaces/1gistliPinn/ChatGPT4/Examples/Download AutoCAD P ID 2010 Portable 64 Bit LINK.md +0 -10
- spaces/1line/AutoGPT/tests/test_token_counter.py +0 -63
- spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download Agar.io for Windows 10 and Enjoy the Multiplayer Action.md +0 -134
- spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download Clash of Lords 2 Guild Castle Mod Apk and Join the Epic Battle of Heroes and Fiends.md +0 -99
- spaces/1phancelerku/anime-remove-background/Brain Test 3 Tricky Quests Mod APK A Long and Tricky Adventure with Brain Teasing Challenges.md +0 -96
- spaces/1phancelerku/anime-remove-background/Dark Riddle 1.0 APK Solve Puzzles and Escape from a Suspicious Neighbor.md +0 -138
- spaces/1phancelerku/anime-remove-background/Download FIFA 22 Mobile and Experience the FIFA World Cup 2022 on Your Phone.md +0 -115
- spaces/1toTree/lora_test/ppdiffusers/schedulers/preconfig/preconfig_scheduling_euler_ancestral_discrete.py +0 -267
- spaces/1toTree/lora_test/ppdiffusers/training_utils.py +0 -152
- spaces/2ndelement/voicevox/ui_template/ui.html +0 -120
- spaces/801artistry/RVC801/infer/modules/train/extract/extract_f0_rmvpe_dml.py +0 -139
- spaces/AB-TW/team-ai/memories.py +0 -61
- spaces/AIZ2H/04-Gradio-SOTA-Seq2Seq-AutoQA/qasrl_model_pipeline.py +0 -183
- spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_2_ProfileRecogition/mmpretrain/configs/_base_/models/__init__.py +0 -0
- spaces/AchyuthGamer/OpenGPT/g4f/Provider/Providers/deprecated/Equing.py +0 -81
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/hiddenedit/Factory.d.ts +0 -6
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/holygrail/methods/GetAddChildConfig.js +0 -70
- spaces/AkitoP/umamusume_bert_vits2/train_ms_acc.py +0 -623
- spaces/AlekseyKorshuk/instagram-filter-removal/modeling/build.py +0 -19
- spaces/AlexWang/lama/saicinpainting/training/data/datasets.py +0 -304
- spaces/Ameaou/academic-chatgpt3.1/crazy_functions/crazy_functions_test.py +0 -92
- spaces/Amrrs/DragGan-Inversion/PTI/models/e4e/latent_codes_pool.py +0 -55
- spaces/Amrrs/DragGan-Inversion/stylegan_human/torch_utils/ops/bias_act.h +0 -40
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/pipelines/deepfloyd_if/pipeline_if_superresolution.py +0 -914
- spaces/Andy1621/uniformer_image_detection/configs/sabl/README.md +0 -37
- spaces/Andy1621/uniformer_image_segmentation/configs/_base_/schedules/schedule_20k.py +0 -9
- spaces/Anonymous-sub/Rerender/ControlNet/annotator/uniformer/mmseg/utils/collect_env.py +0 -17
- spaces/Anonymous-sub/Rerender/ControlNet/ldm/modules/midas/midas/transforms.py +0 -234
- spaces/Ashrafb/codellama-34b/style.css +0 -16
- spaces/Audio-AGI/WavJourney/VoiceParser/model.py +0 -102
- spaces/Avkash/WhisperUI/whisperui.py +0 -216
- spaces/Banbri/zcvzcv/src/lib/getInitialRenderedScene.ts +0 -11
- spaces/Bart92/RVC_HF/julius/lowpass.py +0 -181
- spaces/Benson/text-generation/Examples/Crimen De Gngster Real Versin Antigua Apkpure.md +0 -139
- spaces/Benson/text-generation/Examples/Descargar Gratis Metro Surfistas Juego Para Windows 7 Softonic.md +0 -60
- spaces/BetterAPI/BetterChat/src/routes/conversation/[id]/+server.ts +0 -236
- spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py +0 -4380
- spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_path.py +0 -29
- spaces/BraydenMoore/MARCI-NFL-Betting/Source/Test/__init__.py +0 -0
- spaces/CVPR/Dual-Key_Backdoor_Attacks/datagen/detectron2/docker/Dockerfile +0 -43
- spaces/CVPR/LIVE/thrust/thrust/detail/util/align.h +0 -59
- spaces/CVPR/LIVE/thrust/thrust/generate.h +0 -213
- spaces/CVPR/WALT/mmdet/core/bbox/match_costs/match_cost.py +0 -184
- spaces/CofAI/urlcut/index.html +0 -49
- spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/data/samplers/iteration_based_batch_sampler.py +0 -31
- spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/_binary.py +0 -102
spaces/101-5/gpt4free/g4f/.v1/testing/usesless_test.py
DELETED
@@ -1,13 +0,0 @@
-import usesless
-
-question1 = "Who won the world series in 2020?"
-req = usesless.Completion.create(prompt=question1)
-answer = req["text"]
-message_id = req["parentMessageId"]
-
-question2 = "Where was it played?"
-req2 = usesless.Completion.create(prompt=question2, parentMessageId=message_id)
-answer2 = req2["text"]
-
-print(answer)
-print(answer2)
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Anthony Hamilton The Point Of It All Full Album Zip.md
DELETED
@@ -1,22 +0,0 @@
-<br />
-<h1>Anthony Hamilton: The Point Of It All - A Soulful Masterpiece</h1>
-<p>If you are looking for a soulful and heartfelt R&B album, you should definitely check out <strong>Anthony Hamilton's The Point Of It All</strong>. This album, released in 2008, showcases Hamilton's smooth vocals, honest lyrics, and musical versatility. Whether you want to groove to some upbeat tracks, relax to some mellow tunes, or get inspired by some uplifting messages, this album has it all.</p>
-<p>The Point Of It All is Hamilton's fourth studio album and his most successful one to date. It debuted at number 12 on the Billboard 200 chart and number one on the Top R&B/Hip-Hop Albums chart. It also received a Grammy nomination for Best R&B Album in 2010. The album features 16 tracks, including the singles "Cool", "The Point Of It All", and "Pray For Me".</p>
-<h2>Anthony Hamilton, The Point Of It All full album zip</h2><br /><p><b><b>Download</b> ✒ ✒ ✒ <a href="https://byltly.com/2uKzlT">https://byltly.com/2uKzlT</a></b></p><br /><br />
-<p>Some of the highlights of the album are:</p>
-<ul>
-<li>"Cool" - A funky and catchy song featuring rapper David Banner. This song is about staying cool and calm in a relationship, even when things get heated.</li>
-<li>"The Point Of It All" - A beautiful and romantic ballad that expresses Hamilton's love and devotion to his partner. This song is the title track and the most popular song from the album.</li>
-<li>"Pray For Me" - A powerful and emotional song that deals with Hamilton's struggles and faith. This song is a personal testimony of Hamilton's journey and his gratitude for God's grace.</li>
-<li>"Fallin' In Love" - A sweet and soulful song that celebrates the joy of finding true love. This song features background vocals from Hamilton's wife, Tarsha McMillian.</li>
-<li>"Soul's On Fire" - A passionate and energetic song that showcases Hamilton's vocal range and intensity. This song is about igniting the fire in your soul and living your dreams.</li>
-</ul>
-<p>If you want to listen to Anthony Hamilton's The Point Of It All full album zip, you can download it from various online platforms or stream it on YouTube[^3^]. You won't regret it!</p>
-
-<p>Anthony Hamilton is not only a talented singer, but also a prolific songwriter and record producer. He has co-written and produced songs for many artists, such as Donell Jones, Sunshine Anderson, Jill Scott, Angie Stone, John Legend, and Al Green. He has also collaborated with rappers like Jadakiss, 2Pac, Nappy Roots, and David Banner. Hamilton's music has been featured in several movies and TV shows, such as Django Unchained, American Gangster, The Best Man Holiday, and Empire.</p>
-<p>Hamilton was born on January 28, 1971, in Charlotte, North Carolina[^1^]. He started singing in his church choir when he was 17 years old. He attended South Mecklenburg High School, where he sang for his school's award-winning choir[^1^]. In 1993, he moved to New York City and signed with Uptown Records, but his debut album was shelved when the label went out of business. He then moved to MCA Records and released his first album XTC in 1996, but it failed to make an impact. He later joined Soulife Records and recorded another album that was also unreleased. He then became a backup singer for D'Angelo and wrote songs for other artists. His breakthrough came in 2002 when he sang on Nappy Roots' hit single "Po' Folks", which earned him a Grammy nomination[^2^]. He then signed with Jermaine Dupri's So So Def Records and released his second album Comin' from Where I'm From in 2003, which went platinum and spawned the hits "Comin' from Where I'm From" and "Charlene".</p>
-<p>Since then, Hamilton has released several more albums that have received critical acclaim and commercial success. His third album Ain't Nobody Worryin' (2005) featured the singles "Can't Let Go" and "Sista Big Bones". His fourth album The Point Of It All (2008) included the songs "Cool", "The Point Of It All", and "Pray For Me". His fifth album Back to Love (2011) had the tracks "Woo", "Best of Me", and "Pray for Me". His sixth album What I'm Feelin' (2016) showcased the songs "Amen", "Save Me", and "Ever Seen Heaven". His seventh album Love Is the New Black (2021) contained the songs "You Made a Fool of Me", "Love Is the New Black", and "Mercy" [^2^]. Hamilton has won one Grammy Award for his duet with Al Green on "You've Got the Love I Need" in 2009 and has been nominated for 16 more [^2^].</p>
-<p></p>
-<p>Anthony Hamilton is one of the most respected and influential R&B artists of his generation. He has a distinctive voice that blends soul, gospel, blues, and hip-hop. He has a loyal fan base that appreciates his authentic and relatable music. He has a remarkable career that spans over two decades and shows no signs of slowing down. He is truly a soulful masterpiece.</p> 7b8c122e87<br />
-<br />
-<br />
spaces/1gistliPinn/ChatGPT4/Examples/Autodesk 2016 All Products Patch Keygen -X-Force Zip _BEST_.md
DELETED
@@ -1,7 +0,0 @@
-
-<p>If you still havent found what you are looking for, try our other categories, location, keywords, Autodesk, licensing, files, disk images, emulators, drivers, Autodesk, software, or more! This Unlocked file is only for use with the. Autodesk based products, such as AutoCAD, Revit, 3ds Max, Maya, FreeCAD, and Inventor. </p>
-<p>Because the company releases the AutoCAD 2016 and student and professional editions of other products such as AutoCAD, 3ds Max, Maya, Inventor at regular intervals, we can use these to create unlimited licensed keys for your own free use. This keygen is not for sale, and is only used for testing. This is the latest license key patch, so you may use this. </p>
-<h2>Autodesk 2016 All Products Patch Keygen -X-Force Zip</h2><br /><p><b><b>DOWNLOAD</b> ☆☆☆ <a href="https://imgfil.com/2uy0YH">https://imgfil.com/2uy0YH</a></b></p><br /><br />
-<p>Summary of features in Autodesk 2016 All Products Patch. Comprehensive, free solution to. 'Revert' the previous patch. Autodesk License Patcher allows you to unlock, activate, and uninstall the serial. Autodesk Design Review. Website, www.autodesk.com/products/3ds-max/overview. Autodesk 2014 All Products Universal Keygen For Windows & Mac... Autodesk. Generate, Update, Protect and Validate License Keys for Software. Updated 9/25/2015: ARCHIVE FOR DOWNLOAD DATE 9/25/2015. Update All Products. Autodesk 2016 All Products Patch Keygen -X-Force Zip . Product - 2016. Product - 2013. Official Autodesk Products Site - Home - Unofficial Autodesk 2016 All Products Patch Keygen -X-Force Zip . Autodesk Design Review. Autodesk 2014 All Products. Generation, Update and. Keygens. Autodesk 2007 All Products Unofficial Full Patch. Autodesk. Server Connection. Key Generation. Backup|Generate, Update and. . *Click on Mem Patch (you should see successfully patched) Autodesk 2016 All Products Patch Keygen -X-Force Zip . Autodesk Software at Autodesk. Autodesk. </p> 899543212b<br />
-<br />
-<br />
spaces/1gistliPinn/ChatGPT4/Examples/Crack File For Sap 2000 V15 16 Fix.md
DELETED
@@ -1,7 +0,0 @@
-<br />
-<p>a new study has found that the leading cause of a fire in a health-care facility is "neglect" (22 percent of the time) and that the leading cause of a fire in a home is "accidental combustion" (50 percent of the time). this became evident when, as part of a health-care fire drill, the owners of a hospital found a fire in a hallway and called 911. firefighters promptly arrived on scene, but in their rush, the first thing they did was to set up a four-sided "portable corridor" that doubled as a human flammability barrier. that, in turn, displaced the ambulances with the patients. "there were 700 patients housed in the building at the time of the fire.</p>
-<p>there is a clear documentation of when to use these options to make system changes and when to use them to simply create and operate a new system. if you're not following the documentation properly, you might lock yourself out of the system. in the example above, if you were to use the reset command incorrectly, you would find you could not log on to the new system. </p>
-<h2>Crack File For Sap 2000 V15 16</h2><br /><p><b><b>Download Zip</b> ⏩ <a href="https://imgfil.com/2uxZeF">https://imgfil.com/2uxZeF</a></b></p><br /><br />
-<p> the <code>restorex</code> command is used to protect against data corruption and accidental formatting. the <code>restorex</code> program is not a backup program; it is part of the unix core services that will attempt to fix errors. the <code>restorex</code> can be used to recover from suse linux enterprise software or any operating system, regardless of the backup method. <code>restorex</code> will only recover files that have been created or altered since the last time the file system was consistency checked. you may use this option to recover corrupt files. it's not a full backup recovery, but it's a great tool to have in your backup toolbelt. </p> 899543212b<br />
-<br />
-<br />
spaces/1gistliPinn/ChatGPT4/Examples/Download AutoCAD P ID 2010 Portable 64 Bit LINK.md
DELETED
@@ -1,10 +0,0 @@
-<br />
-<p>in addition to this, there are many more features that make autocad much more user friendly and also easier to use. among those include the use of graphics and icons to make the interface much more user friendly and the use of a mouse instead of a keyboard to make the interface much more intuitive.</p>
-<h2>Download AutoCAD P ID 2010 Portable 64 Bit</h2><br /><p><b><b>Download Zip</b> 🗸 <a href="https://imgfil.com/2uy17g">https://imgfil.com/2uy17g</a></b></p><br /><br />
-<p>the interface is also more compact, and you can toggle through different functions on your screen by hitting an icon at the top for the specific function you want to use. autocad p-tech also has a more robust architecture and the features that make the traditional autocad user interface much more intuitive. you can learn more about p-tech at autodesk's autocad p-tech page.</p>
-<p>hi, i have downloaded the autocad p 2020 portable version from but i don't know how to install it on my computer. i have windows 10 and i'm using windows 7 home edition. i have no idea how to install it. can someone please help me? thanks so much in advance!</p>
-<p>i am having problems with installing autocad portable 2020 on my 64-bit computer. when i start the installation, it first shows a screen of two different versions: one called "home" and the other called "portable". i can't understand why this happens and what the difference between the two versions of the program is. could someone help me with this?</p>
-<p></p>
-<p>thank you so much. i've downloaded the autocad portable 2020 from the website and i've installed it. the problem is that every time i try to open it, it doesn't open. i can't understand why. i can't even find the autocad or autocad.exe file that should be in the "program files" folder. how do i fix this?</p> 899543212b<br />
-<br />
-<br />
spaces/1line/AutoGPT/tests/test_token_counter.py
DELETED
@@ -1,63 +0,0 @@
-import unittest
-
-import tests.context
-from autogpt.token_counter import count_message_tokens, count_string_tokens
-
-
-class TestTokenCounter(unittest.TestCase):
-    def test_count_message_tokens(self):
-        messages = [
-            {"role": "user", "content": "Hello"},
-            {"role": "assistant", "content": "Hi there!"},
-        ]
-        self.assertEqual(count_message_tokens(messages), 17)
-
-    def test_count_message_tokens_with_name(self):
-        messages = [
-            {"role": "user", "content": "Hello", "name": "John"},
-            {"role": "assistant", "content": "Hi there!"},
-        ]
-        self.assertEqual(count_message_tokens(messages), 17)
-
-    def test_count_message_tokens_empty_input(self):
-        self.assertEqual(count_message_tokens([]), 3)
-
-    def test_count_message_tokens_invalid_model(self):
-        messages = [
-            {"role": "user", "content": "Hello"},
-            {"role": "assistant", "content": "Hi there!"},
-        ]
-        with self.assertRaises(KeyError):
-            count_message_tokens(messages, model="invalid_model")
-
-    def test_count_message_tokens_gpt_4(self):
-        messages = [
-            {"role": "user", "content": "Hello"},
-            {"role": "assistant", "content": "Hi there!"},
-        ]
-        self.assertEqual(count_message_tokens(messages, model="gpt-4-0314"), 15)
-
-    def test_count_string_tokens(self):
-        string = "Hello, world!"
-        self.assertEqual(
-            count_string_tokens(string, model_name="gpt-3.5-turbo-0301"), 4
-        )
-
-    def test_count_string_tokens_empty_input(self):
-        self.assertEqual(count_string_tokens("", model_name="gpt-3.5-turbo-0301"), 0)
-
-    def test_count_message_tokens_invalid_model(self):
-        messages = [
-            {"role": "user", "content": "Hello"},
-            {"role": "assistant", "content": "Hi there!"},
-        ]
-        with self.assertRaises(NotImplementedError):
-            count_message_tokens(messages, model="invalid_model")
-
-    def test_count_string_tokens_gpt_4(self):
-        string = "Hello, world!"
-        self.assertEqual(count_string_tokens(string, model_name="gpt-4-0314"), 4)
-
-
-if __name__ == "__main__":
-    unittest.main()
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download Agar.io for Windows 10 and Enjoy the Multiplayer Action.md
DELETED
@@ -1,134 +0,0 @@
-
-<h1>Agar.io Download Windows 10: How to Play the Popular Online Game on Your PC</h1>
-<p>If you are looking for a simple but addictive online game that you can play with millions of other players around the world, you might want to try Agar.io. This game has been around since 2015, but it is still one of the most popular games on the internet. In this article, we will show you what Agar.io is, why it is so fun, and how you can download and install it on your Windows 10 PC.</p>
-<h2>agar.io download windows 10</h2><br /><p><b><b>DOWNLOAD</b> ❤❤❤ <a href="https://urlin.us/2uT1tk">https://urlin.us/2uT1tk</a></b></p><br /><br />
-<h2>What is Agar.io and Why is it So Fun?</h2>
-<p>Agar.io is a multiplayer online game that starts you out as one tiny cell in a huge map. Your goal is to grow bigger by eating smaller cells, while avoiding being eaten by bigger cells. You can also split your cell into smaller pieces to move faster or to escape from predators. The game is simple but challenging, as you have to balance between growing and surviving.</p>
-<p>One of the reasons why Agar.io is so fun is that you can play with other players from different countries and regions. You can join different servers based on your location, or create your own private room to play with your friends. You can also chat with other players using emojis or custom messages. The game is constantly updated with new features and content, such as new game modes, skins, events, and leaderboards.</p>
-<h2>How to Download and Install Agar.io on Windows 10</h2>
-<p>There are two main ways to download and install Agar.io on your Windows 10 PC. One is using an emulator, which allows you to run Android apps on your computer. The other is using a website that offers free downloads of PC games. We will explain both options below.</p>
-<h3>Option 1: Using BlueStacks Emulator</h3>
-<p>BlueStacks is one of the most popular and trusted emulators that you can use to play Android games on your PC. It has many features and advantages that make it a great choice for playing Agar.io on your computer. Here are the steps to follow:</p>
-<h4>Step 1: Download and Install BlueStacks</h4>
-<p>To download BlueStacks, you can visit their official website <a href="(^1^)">here</a>. You will see a button that says "Download Agar.io on PC". Click on it and the download will start automatically. Once the download is complete, run the installer and follow the instructions to install BlueStacks on your PC.</p>
-<p>agar.io pc download windows 10<br />
-agar.io game download for windows 10<br />
-agar.io free download windows 10<br />
-agar.io download for pc windows 10<br />
-agar.io download windows 10 64 bit<br />
-agar.io offline download windows 10<br />
-agar.io online download windows 10<br />
-agar.io app download windows 10<br />
-agar.io download for laptop windows 10<br />
-agar.io download for desktop windows 10<br />
-agar.io mod download windows 10<br />
-agar.io hack download windows 10<br />
-agar.io skins download windows 10<br />
-agar.io bots download windows 10<br />
-agar.io cheats download windows 10<br />
-agar.io emulator download windows 10<br />
-agar.io apk download windows 10<br />
-agar.io exe download windows 10<br />
-agar.io installer download windows 10<br />
-agar.io setup download windows 10<br />
-agar.io full version download windows 10<br />
-agar.io latest version download windows 10<br />
-agar.io update download windows 10<br />
-agar.io patch download windows 10<br />
-agar.io crack download windows 10<br />
-agar.io play on pc windows 10<br />
-agar.io play online on windows 10<br />
-agar.io play offline on windows 10<br />
-agar.io play with friends on windows 10<br />
-agar.io play with bots on windows 10<br />
-agar.io play with skins on windows 10<br />
-agar.io play with mods on windows 10<br />
-agar.io play with hacks on windows 10<br />
-agar.io play with cheats on windows 10<br />
-agar.io play on laptop windows 10<br />
-agar.io play on desktop windows 10<br />
-agar.io play on emulator windows 10<br />
-agar.io play on apk windows 10<br />
-agar.io play on exe windows 10<br />
-agar.io play on installer windows 10<br />
-how to download and install agar.io on windows 10 <br />
-how to download and play agar.io on windows 10 <br />
-how to download and update agar.io on windows 10 <br />
-how to download and patch agar.io on windows 10 <br />
-how to download and crack agar.io on windows 10 <br />
-how to run and play agar.io on windows 10 <br />
-how to run and update agar.io on windows 10 <br />
-how to run and patch agar.io on windows 10 <br />
-how to run and crack agar.io on windows 10</p>
-<h4>Step 2: Launch BlueStacks and Log in to Google Play Store</h4>
-<p>After installing BlueStacks, launch it from your desktop or start menu. You will see a welcome screen that asks you to log in to your Google account. This is necessary to access the Google Play Store, where you can find and install Android apps. If you don't have a Google account, you can create one for free.</p>
-<h4>Step 3: Search for Agar.io and Install it</h4>
-<p>Once you are logged <p>Once you are logged in to the Google Play Store, you can search for Agar.io in the search bar. You will see the Agar.io app icon with a green background and a white dot. Click on it and then click on the "Install" button. The app will be downloaded and installed on your PC.</p>
-<h4>Step 4: Enjoy Playing Agar.io on PC</h4>
-<p>After installing Agar.io, you can launch it from the BlueStacks home screen or the app drawer. You will see the Agar.io logo and then the game will start. You can use your mouse to move your cell around the map, and use the space bar to split your cell or the W key to eject some mass. You can also customize your settings, such as your nickname, skin, game mode, and chat options.</p>
-<h3>Option 2: Using GameTop Website</h3>
-<p>GameTop is a website that offers free downloads of PC games, including Agar.io. It is a safe and reliable source that does not contain any viruses or malware. Here are the steps to follow:</p>
-<h4>Step 1: Visit GameTop Website and Click on Agar.io</h4>
-<p>To visit GameTop website, you can click <a href="">here</a>. You will see a list of categories of games, such as action, arcade, puzzle, racing, and more. Scroll down until you find the category "Online Games". Under this category, you will see Agar.io with a blue background and a white dot. Click on it and you will be redirected to the Agar.io game page.</p>
-<h4>Step 2: Download and Install Agar.io for PC</h4>
-<p>On the Agar.io game page, you will see a button that says "Download Free Full Version". Click on it and the download will start automatically. Once the download is complete, run the installer and follow the instructions to install Agar.io on your PC.</p>
-<h4>Step 3: Launch Agar.io and Start Playing</h4>
-<p>After installing Agar.io, you can launch it from your desktop or start menu. You will see the Agar.io logo and then the game will start. You can use your mouse to move your cell around the map, and use the space bar to split your cell or the W key to eject some mass. You can also customize your settings, such as your nickname, skin, game mode, and chat options.</p>
-<h2>Tips and Tricks for Playing Agar.io on PC</h2>
-<p>Now that you know how to download and install Agar.io on your Windows 10 PC, you might want to learn some tips and tricks to improve your gameplay and have more fun. Here are some of them:</p>
-<h3>Use Keyboard Shortcuts for Better Control</h3>
-<p>One of the advantages of playing Agar.io on PC is that you can use keyboard shortcuts to perform certain actions faster and easier. Here are some of the keyboard shortcuts that you can use:</p>
-<table>
-<tr><th>Key</th><th>Action</th></tr>
-<tr><td>Space</td><td>Split your cell into two pieces</td></tr>
-<tr><td>W</td><td>Eject some mass from your cell</td></tr>
-<tr><td>E</td><td>Eject mass continuously (hold down)</td></tr>
-<tr><td>R</td><td>Split multiple times (hold down)</td></tr>
-<tr><td>T</td><td>Lock or unlock your mouse cursor</td></tr>
-<tr><td>P</td><td>Pause or resume the game</td></tr>
-<tr><td>M</td><td>Mute or unmute the sound effects</td></tr>
-<tr><td>S</td><td>Show or hide skins</td></tr>
-<tr><td>N</td><td>Show or hide names</td></tr>
-<tr><td>H</td><td>Show or hide chat messages</td></tr>
-<tr><td>C</td><td>Show or hide colors</td></tr>
-<tr><td>G</td><td>Show or hide grid lines</td></tr>
-<tr><td>A</td><td>Show or hide mass numbers</td></tr>
-<tr><td>F11</td><td>Enter or exit full screen mode</td></tr>
-<tr><td>F12</td><td>Take a screenshot of the game window</td></tr>
-<tr><td>F5</td><td>Refresh or restart the game page</td></tr>
-<tr>< <tr><td>ESC</td><td>Exit the game and return to the main menu</td></tr>
-</table>
-<p>You can also change the keyboard shortcuts in the settings menu if you prefer different keys.</p>
-<h3>Adjust the Graphics Settings for Optimal Performance</h3>
-<p>Another advantage of playing Agar.io on PC is that you can adjust the graphics settings to suit your preferences and your computer's capabilities. You can find the graphics settings in the settings menu, under the "Graphics" tab. Here are some of the options that you can tweak:</p>
-<ul>
-<li>Resolution: You can choose the resolution of the game window, from low to high. Higher resolutions will make the game look sharper and clearer, but they will also consume more resources and may cause lag or slowdowns.</li>
-<li>Quality: You can choose the quality of the graphics, from low to high. Higher quality will make the game look more realistic and detailed, but they will also consume more resources and may cause lag or slowdowns.</li>
-<li>Anti-aliasing: You can choose whether to enable or disable anti-aliasing, which is a technique that smooths out the edges of the graphics and reduces jaggedness. Enabling anti-aliasing will make the game look smoother and more pleasant, but it will also consume more resources and may cause lag or slowdowns.</li>
-<li>FPS: You can choose the frame rate of the game, from 30 to 60. Higher frame rates will make the game run smoother and more responsive, but they will also consume more resources and may cause lag or slowdowns.</li>
-<li>Show FPS: You can choose whether to show or hide the FPS counter, which is a number that indicates how many frames per second the game is running at. Showing the FPS counter will help you monitor the performance of the game and adjust the settings accordingly, but it will also take up some space on the screen.</li>
-</ul>
-<p>You can experiment with different combinations of graphics settings until you find the optimal balance between quality and performance for your PC.</p>
-<h3>Try Different Game Modes and Skins for More Variety</h3>
-<p>One of the reasons why Agar.io is so popular is that it offers a lot of variety and customization options for players. You can try different game modes and skins to spice up your gameplay and express your personality. Here are some of the game modes and skins that you can choose from:</p>
-<ul>
-<li>Game Modes: You can choose from different game modes, such as FFA (free for all), Teams, Experimental, Party, Battle Royale, Zombie, and Hunger Games. Each game mode has its own rules and objectives, such as working with your teammates, surviving until the end, or eating viruses.</li>
-<li>Skins: You can choose from hundreds of skins, such as flags, animals, memes, celebrities, logos, and more. Some skins are free, while others require coins or premium membership to unlock. You can also create your own custom skins using images or text.</li>
-</ul>
-<p>You can change your game mode and skin in the main menu before starting a game. You can also switch between different servers and regions to play with different players.</p>
-<h2>Conclusion</h2>
-<p>Agar.io is a fun and addictive online game that you can play with millions of other players around the world. It is easy to download and install on your Windows 10 PC using either an emulator or a website. You can also improve your gameplay and have more fun by using keyboard shortcuts, adjusting graphics settings, and trying different game modes and skins. If you are looking for a simple but challenging game that will keep you entertained for hours, you should give Agar.io a try.</p>
-<h2>FAQs</h2>
-<ul>
-<li>Q: Is Agar.io free to play?</li>
-<li>A: Yes, Agar.io is free to play on both PC and mobile devices. However, there are some optional in-app purchases that you can make to get coins, premium membership, or special offers.</li>
-<li>Q: Is Agar.io safe to play?</li>
-<li>A: Yes, Agar.io is safe to play as long as you download it from a trusted source, such as BlueStacks emulator or GameTop website. You should also avoid clicking on any suspicious links or ads that may appear in the game or on the website.</li>
-<li>Q: How do I play Agar.io with my friends?</li>
-<li>A: You can play Agar.io with your friends by creating a private room using the Party mode. To do this, you need to click on the "Create" button in the main menu and then copy and share the link with your friends. Your friends need to click on the link and join your room. You can also chat with your friends using emojis or custom messages.</li <li>Q: How do I get coins and premium membership in Agar.io?</li>
-<li>A: You can get coins and premium membership in Agar.io by making in-app purchases using real money. Coins can be used to buy skins, boosts, or potions. Premium membership can give you access to exclusive skins, double coins, no ads, and other benefits. You can also earn coins by completing daily quests, watching videos, or inviting friends.</li>
-<li>Q: How do I change my nickname and skin in Agar.io?</li>
-<li>A: You can change your nickname and skin in Agar.io by clicking on the "Settings" button in the main menu and then choosing the "Profile" tab. You can type your desired nickname in the text box and select your preferred skin from the list. You can also create your own custom skin by clicking on the "Create" button and uploading an image or typing some text.</li>
-</ul></p> 197e85843d<br />
-<br />
-<br />
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download Clash of Lords 2 Guild Castle Mod Apk and Join the Epic Battle of Heroes and Fiends.md
DELETED
@@ -1,99 +0,0 @@
-
-<h1>Download Clash of Lords 2 Guild Castle Mod Apk: A Strategy Game with Fun and Innovative Twists</h1>
-<p>If you are looking for a strategy game that is different from the usual ones, you might want to try <strong>Clash of Lords 2 Guild Castle</strong>. This game is a sequel to the original Clash of Lords, and it has a lot of new features and improvements that make it more fun and exciting. In this game, you can recruit over 50 heroes and their mercenaries, build and defend a base, and fight alongside your friends in over 10 PvE and PvP modes. You can also control the action and activate heroes' skills in real time, pair heroes and troops with a unique mercenary system, and play it your way with various options and challenges.</p>
-<p>But what if you want to enjoy the game without any limitations or restrictions? What if you want to have unlimited money and gems, free hero hires and arena challenges, no ads and root required, and more? Well, you can do that by downloading <strong>Clash of Lords 2 Guild Castle mod apk</strong>. This is a modified version of the game that gives you access to all the premium features and benefits that you would normally have to pay for or work hard for. In this article, we will tell you what is Clash of Lords 2 Guild Castle mod apk, why you should download it, how to download and install it, and some tips and tricks for playing the game.</p>
-<h2>download clash of lords 2 guild castle mod apk</h2><br /><p><b><b>DOWNLOAD</b> 🔗 <a href="https://urlin.us/2uSVPx">https://urlin.us/2uSVPx</a></b></p><br /><br />
-<h2>What is Clash of Lords 2 Guild Castle?</h2>
-<p>Clash of Lords 2 Guild Castle is a strategy game developed by IGG.COM, a company that is known for creating popular games such as Lords Mobile, Castle Clash, Mobile Royale, etc. The game was released in May 2014 for Android devices, and in June 2014 for iOS devices. Since then, it has been downloaded over 10 million times on Google Play Store alone, and has received positive reviews from players and critics alike. The game has also been updated regularly with new content, features, events, heroes, etc.</p>
-<h3>Game features</h3>
-<p>Some of the main features of Clash of Lords 2 Guild Castle are:</p>
-<p>How to install clash of lords 2 guild castle mod apk on android<br />
-Clash of lords 2 guild castle mod apk unlimited money and gems<br />
-Clash of lords 2 guild castle mod apk latest version download<br />
-Clash of lords 2 guild castle mod apk offline mode<br />
-Clash of lords 2 guild castle mod apk free shopping<br />
-Clash of lords 2 guild castle mod apk hack cheats<br />
-Clash of lords 2 guild castle mod apk no root required<br />
-Clash of lords 2 guild castle mod apk gameplay and review<br />
-Clash of lords 2 guild castle mod apk features and benefits<br />
-Clash of lords 2 guild castle mod apk download link and instructions<br />
-Clash of lords 2 guild castle mod apk best heroes and strategies<br />
-Clash of lords 2 guild castle mod apk tips and tricks<br />
-Clash of lords 2 guild castle mod apk update and news<br />
-Clash of lords 2 guild castle mod apk comparison with original game<br />
-Clash of lords 2 guild castle mod apk pros and cons<br />
-Clash of lords 2 guild castle mod apk for PC and laptop<br />
-Clash of lords 2 guild castle mod apk online multiplayer mode<br />
-Clash of lords 2 guild castle mod apk support and feedback<br />
-Clash of lords 2 guild castle mod apk ratings and reviews<br />
-Clash of lords 2 guild castle mod apk screenshots and videos<br />
-Clash of lords 2 guild castle mod apk system requirements and compatibility<br />
-Clash of lords 2 guild castle mod apk download size and speed<br />
-Clash of lords 2 guild castle mod apk bugs and issues<br />
-Clash of lords 2 guild castle mod apk alternatives and similar games<br />
-Clash of lords 2 guild castle mod apk developer and publisher information</p>
-<ul>
-<li>You control the action! Activate Heroes’ skills in real time!</li>
-<li>Pair Heroes and Troops with our unique Mercenary system!</li>
-<li>Play it Your Way! With over 10 PvE and PvP modes, there's always something fun and different to do!</li>
-<li>Fight alongside your friends! Join a Guild, and clash with players from all over the world! You can even battle against players from different countries!</li>
-<li>Free to Play! Log in every day to win free Heroes and Jewels!</li>
-</ul>
-<h3>Game modes</h3>
-<p>Some of the game modes that you can play in Clash of Lords 2 Guild Castle are:</p>
-<ul>
-<li>Battle Royale: A free-for-all frenzy where you have to survive against other players.</li>
-<li>Lords League: A competitive mode where you have to climb the ranks by defeating other players.</li>
-<li>Guild Wars: A team-based mode where you have to cooperate with your guild members to conquer other guilds.</li>
-<li>Guild Bash: A fun mode where you have to smash as many eggs as possible to earn rewards.</li>
-<li>Hero Trials: A challenging mode where you have to face different scenarios and enemies with your heroes.</li>
-<li>Resource Raids: A mode where you have to loot resources from other players' bases.</li>
-<li>Coliseum: A mode where you have to fight against other players' heroes in a turn-based battle.</li>
-<li>Expedition: A mode where you have to explore different maps and complete quests with your heroes.</li>
-<li>Hero Arena: A mode where you have to compete with other players in a hero-only battle.</li>
-<li>Guild Boss: A mode where you have to team up with your guild members to defeat powerful bosses.</li>
-</ul>
-<h2>Why download Clash of Lords 2 Guild Castle mod apk?</h2>
-<p>Clash of Lords 2 Guild Castle is a fun and addictive game, but it can also be frustrating and time-consuming if you don't have enough resources, heroes, or options. That's why you might want to download <strong>Clash of Lords 2 Guild Castle mod apk</strong>, which is a modified version of the game that gives you a lot of advantages and benefits that you won't get from the original version. Here are some of the reasons why you should download Clash of Lords 2 Guild Castle mod apk:</p>
-<h3>Unlimited money and gems</h3>
-<p>Money and gems are the main currencies in the game, and you need them to buy items, upgrade buildings, hire heroes, etc. However, they are not easy to come by, and you might have to spend real money or watch ads to get them. But with Clash of Lords 2 Guild Castle mod apk, you don't have to worry about that, because you will have unlimited money and gems at your disposal. You can use them to buy anything you want, upgrade anything you need, and hire any hero you like. You can also use them to speed up the game progress and skip the waiting time.</p>
-<h3>Free hero hires and arena challenges</h3>
-<p>Heroes are the most important part of the game, as they are the ones who lead your troops into battle and use their skills to turn the tide of the war. However, hiring heroes is not cheap, and you might have to spend a lot of money and gems to get them. Moreover, some heroes are only available through arena challenges, which are limited and require tickets to enter. But with Clash of Lords 2 Guild Castle mod apk, you don't have to worry about that, because you will be able to hire any hero for free, and enter any arena challenge without tickets. You can also switch between heroes anytime you want, and experiment with different combinations and strategies.</p>
-<h3>No ads and root required</h3>
-<p>Ads are annoying and distracting, especially when they pop up in the middle of the game or when you are trying to do something important. They can also slow down your device and consume your data. But with Clash of Lords 2 Guild Castle mod apk, you don't have to worry about that, because there will be no ads in the game at all. You can enjoy the game without any interruptions or disturbances. Moreover, some mod apk files require root access to work properly, which can be risky and complicated for your device. But with Clash of Lords 2 Guild Castle mod apk, you don't have to worry about that, because it does not require root access at all. You can install it easily and safely on any Android device.</p>
-<h2>How to download and install Clash of Lords 2 Guild Castle mod apk?</h2>
-<p>If you are convinced by the benefits of downloading Clash of Lords 2 Guild Castle mod apk, you might be wondering how to do it. Well, it's not hard at all, and it only takes a few minutes. Here are the steps that you need to follow:</p>
-<h3>Step 1: Enable unknown sources</h3>
-<p>The first thing that you need to do is enable unknown sources on your device. This is a security setting that allows you to install apps from sources other than Google Play Store. To do this, go to your device settings > security > unknown sources > enable.</p>
-<h3>Step 2: Download the mod apk file</h3>
-<p>The next thing that you need to do is download the mod apk file from a reliable source. You can search for it online or use this link: <a href="">Clash of Lords 2 Guild Castle Mod Apk Download</a>. Make sure that the file is compatible with your device version and has no viruses or malware.</p>
-<h3>Step 3: Install the mod apk file</h3>
-<p>The last thing that you need to do is install the mod apk file on your device. To do this, locate the file in your file manager or downloads folder, and tap on it to start the installation process. Follow the instructions on the screen and wait for the installation to finish.</p>
-<h3>Step 4: Launch the game and enjoy</h3>
-<p>The final thing that you need to do is launch the game and enjoy it. To do this, find the game icon on your home screen or app drawer, and tap on it to open it. You will see a mod menu where you can customize your settings and preferences. You can also access all the features and benefits of the mod apk, such as unlimited money and gems, free hero hires and arena challenges, no ads and root required, etc. You can now play the game with fun and innovative twists.</p>
-<h2>Tips and tricks for playing Clash of Lords 2 Guild Castle</h2>
-<p>Now that you have downloaded and installed Clash of Lords 2 Guild Castle mod apk, you might want to know some tips and tricks for playing the game better and smarter. Here are some of them:</p>
-<h3>Upgrade your town hall and troops</h3>
-<p>One of the most important things that you need to do in the game is upgrade your town hall and troops. Your town hall is the heart of your base, and it determines your level, resource capacity, building limit, etc. Your troops are your main force in battle, and they determine your attack power, defense power, speed, etc. Therefore, you need to upgrade them regularly to unlock new features, abilities, options, etc. You can use your money and gems to speed up the upgrading process.</p>
-<h3>Fuse your heroes to make them stronger</h3>
-<p>Another important thing that you need to do in the game is fuse your heroes to make them stronger. Your heroes are your leaders in battle, and they have unique skills that can change the outcome of the war. However, they also have different levels, ranks, grades, etc. that affect their performance. Therefore, you need to fuse them with other heroes or materials to increase their stats, skills, stars, etc. You can use your money and gems to buy more heroes or materials for fusion.</p>
-<h3>Plan your strategy before heading to battle</h3>
-<p>A third important thing that you need to do in the game is plan your strategy before heading to battle. Your strategy is your plan of action in battle, and it involves choosing your heroes, troops, mercenaries, formations, etc. You need to consider various factors such as your enemy's strength, weakness, type, etc., as well as your own goals, objectives, resources, etc. You also need to adapt your strategy according to the game mode that you are playing. You can use your money and gems to change your strategy anytime you want.</p>
-<h3>Join a guild and fight alongside your friends</h3>
-<p>A fourth important thing that you need to do in the game is join a guild and fight alongside your friends. A guild is a group of players who share a common interest in the game, and who can help each other out with various aspects of the game such as resources, advice, support, etc. You can also participate in guild wars and guild bashes with your guild members, where you can compete with other guilds for glory and rewards. You can use your money and gems to donate to your guild or buy guild gifts.</p>
-<h2>Conclusion</h2>
-<p>In conclusion, Clash of Lords 2 Guild Castle is a strategy game with fun and innovative twists that you can enjoy on your Android device. You can recruit over 50 heroes and their mercenaries, build and defend a base, and fight alongside your friends in over 10 PvE and PvP modes. You can also control the action and activate heroes' skills in real time, pair heroes and troops with a unique mercenary system, and play it your way with various options and challenges. However, if you want to have more fun and freedom in the game, you should download Clash of Lords 2 Guild Castle mod apk, which is a modified version of the game that gives you unlimited money and gems, free hero hires and arena challenges, no ads and root required, and more. You can download and install it easily and safely on your device by following the steps that we have provided in this article. You can also use some tips and tricks that we have shared to play the game better and smarter. We hope that you have enjoyed this article and found it helpful. If you have any questions or feedback, please feel free to leave them in the comments section below.</p>
-<h2>FAQs</h2>
-<p>Here are some frequently asked questions about Clash of Lords 2 Guild Castle mod apk:</p>
-<h4>Q: Is Clash of Lords 2 Guild Castle mod apk safe to use?</h4>
-<p>A: Yes, Clash of Lords 2 Guild Castle mod apk is safe to use, as long as you download it from a reliable source and scan it for viruses or malware before installing it. However, you should always be careful when downloading and installing any mod apk file, as some of them might contain harmful or malicious code that can damage your device or compromise your privacy. You should also backup your data before using any mod apk file, as some of them might overwrite or delete your original game data.</p>
-<h4>Q: Is Clash of Lords 2 Guild Castle mod apk compatible with my device?</h4>
-<p>A: Clash of Lords 2 Guild Castle mod apk is compatible with most Android devices that run on Android 4.1 or higher. However, some devices might not support the mod apk file due to different specifications or settings. You should check the compatibility of the mod apk file with your device before downloading and installing it. You should also make sure that you have enough storage space on your device to install the mod apk file.</p>
-<h4>Q: Will I get banned for using Clash of Lords 2 Guild Castle mod apk?</h4>
-<p>A: There is a possibility that you might get banned for using Clash of Lords 2 Guild Castle mod apk, as it is against the terms of service and fair play policy of the game. The game developers might detect the use of the mod apk file and suspend or terminate your account. Therefore, you should use the mod apk file at your own risk and discretion. You should also avoid using the mod apk file in online or multiplayer modes, as it might affect the game balance and fairness for other players.</p>
-<h4>Q: Can I update Clash of Lords 2 Guild Castle mod apk?</h4>
-<p>A: Yes, you can update Clash of Lords 2 Guild Castle mod apk, but you might lose some of the features or benefits of the mod apk file. The game developers might release new updates for the original version of the game that might not be compatible with the mod apk file. Therefore, you should check the compatibility of the update with the mod apk file before downloading and installing it. You should also backup your data before updating the mod apk file, as some updates might overwrite or delete your previous game data.</p>
-<h4>Q: Can I uninstall Clash of Lords 2 Guild Castle mod apk?</h4>
-<p>A: Yes, you can uninstall Clash of Lords 2 Guild Castle mod apk anytime you want, just like any other app on your device. To do this, go to your device settings > apps > Clash of Lords 2 Guild Castle > uninstall. However, you should note that uninstalling the mod apk file will also delete all your game data and progress. Therefore, you should backup your data before uninstalling the mod apk file.</p> 197e85843d<br />
-<br />
-<br />
spaces/1phancelerku/anime-remove-background/Brain Test 3 Tricky Quests Mod APK A Long and Tricky Adventure with Brain Teasing Challenges.md
DELETED
@@ -1,96 +0,0 @@
-
-<h1>Brain Test 3: Tricky Quests Mod APK - A Fun and Challenging Puzzle Game</h1>
-<p>If you are looking for a game that will test your brain power, make you laugh, and keep you entertained, then you should try <strong>Brain Test 3: Tricky Quests</strong>. This is a puzzle game with dozens of tricky questions and puzzles accompanied by colorful characters with original stories. In this game, you will join Alyx on her quest to find the six power gems in order to save her dying father. Along the way, you will meet with Brain Test franchise characters and face various challenges and dangers. You will need to use your logic, creativity, and intuition to solve the puzzles and shape the story.</p>
-<p>Brain Test 3 is the third installment in the popular Brain Test series, which has been downloaded by millions of players worldwide. The game is developed by Unico Studio, a game development studio based in the United States. The game is available for both Android and iOS devices, but if you want to enjoy some extra features and benefits, you should download <strong>Brain Test 3 Mod APK</strong>. This is a modified version of the game that gives you unlimited resources, no ads, and all levels unlocked. In this article, we will tell you how to download and install Brain Test 3 Mod APK, what are its features, how to play it, what are its benefits, and what are some tips and tricks for playing it.</p>
-<h2>brain test 3 tricky quests mod apk</h2><br /><p><b><b>DOWNLOAD</b> ····· <a href="https://jinyurl.com/2uNMO5">https://jinyurl.com/2uNMO5</a></b></p><br /><br />
-<h2>How to Download and Install Brain Test 3 Mod APK</h2>
-<p>Downloading and installing Brain Test 3 Mod APK is very easy and simple. Just follow these steps:</p>
-<ol>
-<li>Download the mod APK file from a trusted source. You can use this link to download it.</li>
-<li>Enable unknown sources on your device settings. This will allow you to install apps from sources other than Google Play Store or App Store. To do this, go to Settings > Security > Unknown Sources and toggle it on.</li>
-<li>Install the mod APK file by tapping on it and following the instructions. It may take a few seconds or minutes depending on your device.</li>
-<li>Launch the game and enjoy!</li>
-</ol>
-<h2>What are the Features of Brain Test 3 Mod APK?</h2>
-<p>Brain Test 3 Mod APK has some amazing features that will make your gaming experience more enjoyable and rewarding. Here are some of them:</p>
-<ul>
-<li><strong>Unlimited resources:</strong> With Brain Test 3 Mod APK hidden secrets or clues. You can also talk to the characters and choose different dialogues that may affect the story.</li>
-<li>Collect and use items from your inventory. Some puzzles require you to collect items from the environment and use them to solve the puzzles. You can access your inventory by tapping on the backpack icon on the top right corner of the screen. You can drag and drop the items from your inventory to the screen or combine them with other items.</li>
-<li>Use hints and help if you get stuck. If you are having trouble with a puzzle, you can use hints or help to get some guidance. You can tap on the light bulb icon on the top left corner of the screen to get a hint, which will give you a clue or a suggestion on how to solve the puzzle. You can also tap on the question mark icon on the top left corner of the screen to get help, which will show you the solution or the answer to the puzzle. However, using hints or help will cost you diamonds, so use them wisely.</li>
-</ul>
-<h2>What are the Benefits of Playing Brain Test 3: Tricky Quests?</h2>
-<p>Brain Test 3 is not only a fun and entertaining game, but also a beneficial one. Playing Brain Test 3 can help you in many ways, such as:</p>
-<ul>
-<li><strong>Train your brain and improve your cognitive skills:</strong> Brain Test 3 is a game that will challenge your brain and make you think in different ways. By solving the puzzles, you will improve your logic, creativity, memory, concentration, problem-solving, and lateral thinking skills. You will also learn new facts and information from the puzzles and the story.</li>
-<li><strong>Have fun and laugh at the humorous situations and dialogues:</strong> Brain Test 3 is a game that will make you laugh and smile with its witty and hilarious situations and dialogues. The game has a lot of humor and comedy that will brighten up your mood and relieve your stress. You will enjoy the funny interactions between Alyx and other characters, as well as the absurd and unexpected outcomes of some puzzles.</li>
-<li><strong>Experience an engaging and immersive story with Alyx and other characters:</strong> Brain Test 3 is a game that will immerse you in an engaging and captivating story with Alyx and other characters from the Brain Test franchise. You will follow Alyx on her quest to find the six power gems and save her father, while meeting with old and new friends and foes along the way. You will also shape the story with your choices and actions, as well as discover secrets and mysteries about Alyx's past and future.</li>
-</ul>
-<h2>What are some Tips and Tricks for Playing Brain Test 3: Tricky Quests?</h2>
-<p>Brain Test 3 is a game that will test your brain power, make you laugh, and keep you entertained. However, some of the puzzles may be too tricky or difficult for you. Here are some tips and tricks that may help you play better:</p>
-<ul>
-<li><strong>Think outside the box and try different approaches:</strong> Brain Test 3 is a game that will challenge your brain and make you think in different ways. Sometimes, the answer or the solution is not what you expect or what seems obvious. You may need to think outside the box and try different approaches to solve the puzzles. For example, you may need to use unconventional methods, break some rules, or use your device's features.</li>
-<li><strong>Pay attention to the details and clues on the screen:</strong> Brain Test 3 is a game that will test your observation skills and attention to detail. Some of the puzzles may have subtle details or clues on the screen that may help you solve them. You may need to pay attention to the colors, shapes, sizes, numbers, words, sounds, or movements of the objects and characters on the screen. You may also need to zoom in or out of the screen to see more details or clues.</li>
-<li><strong>Explore the workshop and upgrade your tools and machines:</strong> Brain Test 3 is a game that will let you explore the workshop of Alyx's father, where you can find and use various tools and machines to help you solve the puzzles. You can also upgrade your tools and machines with diamonds to make them more powerful and useful. For example, you can upgrade your magnifying glass to see more details, your hammer to break more things, or your jetpack to fly higher.</li>
-<li><strong>Play the bonus action levels for more excitement and challenge:</strong> Brain Test 3 is a game that will not only test your brain, but also your reflexes and skills. Some of the levels are bonus action levels, where you need to control Alyx or other characters and perform different actions, such as running, jumping, flying, shooting, or fighting. These levels are more exciting and challenging than the regular puzzle levels, and they may also reward you with more diamonds or items.</li>
|
35 |
-
</ul>
|
36 |
-
<h1>Conclusion</h1>
|
37 |
-
<p>Brain Test 3: Tricky Quests is a fun and challenging puzzle game that will test your brain power, make you laugh, and keep you entertained. The game has over 100 levels of tricky puzzles and quests that will challenge your logic, creativity, and intuition. The game also has an engaging and immersive story with Alyx and other characters from the Brain Test franchise. You can shape the story with your choices and actions, as well as discover secrets and mysteries about Alyx's past and future.</p>
|
38 |
-
|
78 |
-
<p>If you want to enjoy some extra features and benefits, you should download Brain Test 3 Mod APK. This is a modified version of the game that gives you unlimited resources, no ads, and all levels unlocked. You can download and install Brain Test 3 Mod APK easily and safely by following the steps in this article.</p>
|
79 |
-
<p>Brain Test 3 is a game that will not only entertain you, but also benefit you in many ways. Playing Brain Test 3 can help you train your brain and improve your cognitive skills, have fun and laugh at the humorous situations and dialogues, and experience an engaging and immersive story with Alyx and other characters. You can also play better by following some tips and tricks that we shared in this article.</p>
|
80 |
-
<p>If you are looking for a game that will test your brain power, make you laugh, and keep you entertained, then you should try Brain Test 3: Tricky Quests. You can download it from Google Play Store or App Store for free, or download Brain Test 3 Mod APK from this link for some extra features and benefits. Have fun playing Brain Test 3!</p>
|
81 |
-
<h2>FAQs</h2>
|
82 |
-
<p>Here are some common questions and answers about Brain Test 3 Mod APK:</p>
|
83 |
-
<ol>
|
84 |
-
<li><strong>Q: Is Brain Test 3 Mod APK safe to download and install?</strong></li>
|
85 |
-
<li><strong>A: Generally yes. The mod APK file is scanned for viruses and malware before being uploaded, but modded APKs always carry some risk, so you should only download Brain Test 3 Mod APK from a trusted source like this link.</strong></li>
|
86 |
-
<li><strong>Q: Do I need to root or jailbreak my device to use Brain Test 3 Mod APK?</strong></li>
|
87 |
-
<li><strong>A: No, you do not need to root your device to use Brain Test 3 Mod APK; it installs like any other sideloaded Android app. Note that APK files are Android-only, so there is nothing to jailbreak on an iPhone.</strong></li>
|
88 |
-
<li><strong>Q: Will I get banned from playing Brain Test 3 if I use Brain Test 3 Mod APK?</strong></li>
|
89 |
-
<li><strong>A: No, you will not get banned from playing Brain Test 3 if you use Brain Test 3 Mod APK. The mod APK file does not interfere with the game's servers or data. However, you should always use it at your own risk and discretion.</strong></li>
|
90 |
-
<li><strong>Q: How can I update Brain Test 3 Mod APK?</strong></li>
|
91 |
-
<li><strong>A: You can update Brain Test 3 Mod APK by downloading the latest version of the mod APK file from the same source as before. You can also check this article for any updates or news about Brain Test 3 Mod APK.</strong></li>
|
92 |
-
<li><strong>Q: How can I contact the developer of Brain Test 3 Mod APK?</strong></li>
|
93 |
-
<li><strong>A: You can contact the developer of Brain Test 3 Mod APK by visiting their website or sending them an email at [email protected].</strong></li>
|
94 |
-
</ol>
spaces/1phancelerku/anime-remove-background/Dark Riddle 1.0 APK Solve Puzzles and Escape from a Suspicious Neighbor.md
DELETED
@@ -1,138 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>Dark Riddle 1.0 APK: A Stealth Puzzle Game with a Mysterious Neighbor</h1>
|
3 |
-
<p>If you are a fan of stealth puzzle games, you might have heard of <strong>Dark Riddle</strong>, a game that is similar to the popular <strong>Hello Neighbor</strong>. In this game, you have to sneak into your neighbor's house and find out what he is hiding in his basement. But be careful, he is not as friendly as he seems, and he will try to stop you at all costs.</p>
|
4 |
-
<h2>dark riddle 1.0 apk</h2><br /><p><b><b>Download File</b> ::: <a href="https://jinyurl.com/2uNQrt">https://jinyurl.com/2uNQrt</a></b></p><br /><br />
|
5 |
-
<p>In this article, we will tell you everything you need to know about <strong>Dark Riddle 1.0 APK</strong>, the latest version of the game that you can download and install on your Android device. We will explain what Dark Riddle is, how to download and install it, and how to play it. Let's get started!</p>
|
6 |
-
<h2>What is Dark Riddle?</h2>
|
7 |
-
<p>Dark Riddle is a stealth puzzle game developed by <strong>PAGA GROUP</strong>, a studio that specializes in creating games with immersive stories and realistic graphics. The game was released in 2020 and has received positive reviews from players and critics alike.</p>
|
8 |
-
<h3>The premise of the game</h3>
|
9 |
-
<p>The game begins in a house you just moved into, and with a suspicious neighbor across the street. You notice that he has a lot of security cameras, locks, and traps around his house, and that he acts very strangely. You decide to investigate what he is up to, and discover that he has a secret basement where he keeps something mysterious.</p>
|
10 |
-
<p>Your goal is to find out what he is hiding, and why he is so obsessed with it. But be careful, he will not let you enter his house easily, and he will chase you if he sees you. You have to use your stealth skills, your logic, and your creativity to avoid him and solve the puzzles that block your way.</p>
|
11 |
-
|
51 |
-
<h3>The gameplay and features</h3>
|
52 |
-
<p>Dark Riddle is a game that combines elements of stealth, puzzle, adventure, and horror. You have to explore your neighbor's house, find clues, collect items, use tools, and interact with objects to progress in the game. You can also distract your neighbor by making noises, throwing objects, or setting traps.</p>
|
53 |
-
<p>The game has many features that make it fun and challenging, such as:</p>
|
54 |
-
<ul>
|
55 |
-
<li>A dynamic AI system that makes your neighbor react differently depending on your actions.</li>
|
56 |
-
<li>A sandbox mode that lets you create your own scenarios and share them with other players.</li>
|
57 |
-
<li>A multiplayer mode that lets you play with or against other players online.</li>
|
58 |
-
<li>A variety of endings that depend on your choices and actions.</li>
|
59 |
-
<li>A lot of secrets and Easter eggs to discover.</li>
|
60 |
-
</ul>
|
61 |
-
<h3>The graphics and sound</h3>
|
62 |
-
<p>Dark Riddle has impressive graphics that create a realistic and immersive atmosphere. The game uses <strong>Unreal Engine 4</strong>, which allows for high-quality textures, lighting, shadows, and animations. The game also has a lot of details and objects that make the environment rich and interactive.</p>
|
63 |
-
<p>The sound design of the game is also very well done, as it enhances the mood and tension of the game. The game has a creepy soundtrack that matches the theme of the game, as well as realistic sound effects that make you feel like you are in the game. The game also has voice acting for the characters, which adds personality and emotion to them.</p>
|
64 |
-
<h2>How to download and install Dark Riddle 1.0 APK?</h2>
|
65 |
-
<p>If you want to play Dark Riddle on your Android device, you can download and install the APK file of the game, which is a modified version that has some advantages over the original version. Here is how you can do it.</p>
|
66 |
-
<h3>The requirements for the APK file</h3>
|
67 |
-
<p>Before you download and install the APK file, you need to make sure that your device meets the following requirements:</p>
|
68 |
-
<ul>
|
69 |
-
<li>Your device must have Android 4.4 or higher.</li>
|
70 |
-
<li>Your device must have at least 2 GB of RAM and 500 MB of free storage space.</li>
|
71 |
-
<li>Your device must have a stable internet connection.</li>
|
72 |
-
<li>Your device must allow the installation of apps from unknown sources. You can enable this option by going to Settings > Security > Unknown Sources.</li>
|
73 |
-
</ul>
|
74 |
-
<h3>The steps to download and install the APK file</h3>
|
75 |
-
<p>Once you have checked the requirements, you can follow these steps to download and install the APK file:</p>
|
76 |
-
<ol>
|
77 |
-
<li>Go to this link and click on the Download button to download the APK file.</li>
|
78 |
-
<li>Locate the downloaded file in your device's file manager and tap on it to start the installation process.</li>
|
79 |
-
<li>Follow the instructions on the screen and wait for the installation to finish.</li>
|
80 |
-
<li>Launch the game from your app drawer and enjoy!</li>
|
81 |
-
</ol>
|
82 |
-
<h3>The benefits of using the APK file</h3>
|
83 |
-
<p>By using the APK file, you can enjoy some benefits that are not available in the original version of the game, such as:</p>
|
84 |
-
<ul>
|
85 |
-
<li>No ads or in-app purchases that can interrupt your gameplay.</li>
|
86 |
-
<li>All levels and modes unlocked from the start.</li>
|
87 |
-
<li>Unlimited coins and gems that you can use to buy items and tools.</li>
|
88 |
-
<li>Access to exclusive features and updates that are not yet released in the official version.</li>
|
89 |
-
</ul>
|
90 |
-
<h2>How to play Dark Riddle 1.0 APK?</h2>
|
91 |
-
<p>Now that you have downloaded and installed Dark Riddle 1.0 APK, you might be wondering how to play it. Here are some tips and tricks that will help you master the game.</p>
|
92 |
-
<h3>The controls and interface</h3>
|
93 |
-
<p>The game has a simple and intuitive control system that lets you move around, interact with objects, and use items. You can use the virtual joystick on the left side of the screen to move your character, and swipe on the right side of the screen to look around. You can also tap on the icons on the right side of the screen to perform actions such as jumping, crouching, running, or using items.</p>
|
94 |
-
<p>The game also has a user-friendly interface that shows you important information such as your health, inventory, objectives, and map. You can access these by tapping on the icons on the top of the screen. You can also pause the game by tapping on the menu icon on the top right corner of the screen.</p>
|
95 |
-
<h3>The tips and tricks</h3>
|
96 |
-
<p>The game is not easy, as your neighbor is smart and unpredictable. You have to use your wits and skills to outsmart him and solve the puzzles. Here are some tips and tricks that will help you:</p>
|
97 |
-
<ul>
|
98 |
-
<li>Be stealthy and avoid making noise. Your neighbor can hear you if you run, jump, or break something. You can also use objects such as radios, TVs, or phones to distract him or lure him away from his house.</li>
|
99 |
-
<li>Be observant and explore everything. Your neighbor's house is full of clues, items, secrets, and puzzles. You have to find them and use them to your advantage. You can also use binoculars or cameras to spy on your neighbor and learn his habits and routines.</li>
|
100 |
-
<li>Be creative and use tools. You can find various tools such as crowbars, hammers, keys, or magnets that can help you open doors, break windows, or activate switches. You can also combine items to create new ones, such as fireworks, bombs, or traps.</li>
|
101 |
-
<li>Be careful and save often. Your neighbor is not friendly, and he will try to catch you if he sees you. If he does, he will take away your items and send you back to your house. You can save your progress by using phones or radios in your house or in his house.</li>
|
102 |
-
</ul>
|
103 |
-
<h3>The challenges and rewards</h3>
|
104 |
-
<p>The game has many challenges and rewards that make it more fun and rewarding. You can complete various objectives such as finding keys, unlocking doors, entering rooms, or discovering secrets. You can also collect coins and gems that you can use to buy items or tools in the shop. You can also unlock achievements that show your progress and skills in the game.</p>
|
105 |
-
<h2>Conclusion</h2>
|
106 |
-
<p>Dark Riddle 1.0 APK is a stealth puzzle game that will keep you entertained and challenged for hours. You have to sneak into your neighbor's house and find out what he is hiding in his basement, while avoiding his traps and attacks. You have to use your stealth skills, your logic, and your creativity to solve the puzzles and uncover the secrets. You can also enjoy the sandbox mode, the multiplayer mode, the various endings, and the secrets and Easter eggs that the game has to offer.</p>
|
107 |
-
<p>If you want to play Dark Riddle on your Android device, you can download and install the APK file of the game, which has some benefits over the original version. You can enjoy the game without ads or in-app purchases, with all levels and modes unlocked, with unlimited coins and gems, and with exclusive features and updates.</p>
|
108 |
-
<p>Dark Riddle 1.0 APK is a game that will test your wits and skills, and will give you a thrilling and immersive experience. If you are a fan of stealth puzzle games, you should definitely try it out. You will not regret it!</p>
|
109 |
-
<h2>FAQs</h2>
|
110 |
-
<p>Here are some frequently asked questions about Dark Riddle 1.0 APK:</p>
|
111 |
-
<table>
|
112 |
-
<tr>
|
113 |
-
<th>Question</th>
|
114 |
-
<th>Answer</th>
|
115 |
-
</tr>
|
116 |
-
<tr>
|
117 |
-
<td>Is Dark Riddle 1.0 APK safe to download and install?</td>
|
118 |
-
<td>Yes, Dark Riddle 1.0 APK is safe to download and install, as long as you use a trusted source such as this link. The APK file has been scanned for viruses and malware, and has no harmful effects on your device.</td>
|
119 |
-
</tr>
|
120 |
-
<tr>
|
121 |
-
<td>Is Dark Riddle 1.0 APK compatible with my device?</td>
|
122 |
-
<td>Dark Riddle 1.0 APK is compatible with most Android devices that have Android 4.4 or higher, 2 GB of RAM, 500 MB of free storage space, and a stable internet connection. You can check the compatibility of your device by going to Settings > About Phone > Software Information.</td>
|
123 |
-
</tr>
|
124 |
-
<tr>
|
125 |
-
<td>How can I update Dark Riddle 1.0 APK?</td>
|
126 |
-
<td>You can update Dark Riddle 1.0 APK by downloading and installing the latest version of the APK file from this link . You do not need to uninstall the previous version of the game, as the new version will overwrite it.</td>
|
127 |
-
</tr>
|
128 |
-
<tr>
|
129 |
-
<td>How can I contact the developers of Dark Riddle?</td>
|
130 |
-
<td>You can contact the developers of Dark Riddle by sending them an email at [email protected], or by visiting their website at https://pagagroup.com/. You can also follow them on Facebook, Twitter, or Instagram for news and updates about the game.</td>
|
131 |
-
</tr>
|
132 |
-
<tr>
|
133 |
-
<td>How can I share my feedback or suggestions about Dark Riddle?</td>
|
134 |
-
<td>You can share your feedback or suggestions about Dark Riddle by leaving a comment or rating on the Google Play Store, or by sending an email to [email protected]. The developers appreciate your feedback and suggestions, as they help them improve the game and make it more enjoyable for you.</td>
|
135 |
-
</tr>
|
136 |
-
</table>
spaces/1phancelerku/anime-remove-background/Download FIFA 22 Mobile and Experience the FIFA World Cup 2022 on Your Phone.md
DELETED
@@ -1,115 +0,0 @@
|
|
1 |
-
|
2 |
-
<h1>FIFA 22 Mobile: How to Download and Play the Ultimate Soccer Game on Android</h1>
|
3 |
-
<p>If you are a soccer fan, you probably have heard of <strong>FIFA 22</strong>, the latest installment of the popular soccer simulation game series by EA Sports. But did you know that you can also play <strong>FIFA 22 Mobile</strong> on your Android device? Yes, you read that right. You can enjoy the ultimate soccer game experience on your smartphone or tablet, with tons of features, modes, players, teams, stadiums, and more.</p>
|
4 |
-
<p>In this article, we will tell you everything you need to know about FIFA 22 Mobile, including what it is, what its main features and modes are, how to download it on your Android device, and some tips and tricks for playing it. So, without further ado, let's get started.</p>
|
5 |
-
<h2>fifa 22 mobile download android</h2><br /><p><b><b>DOWNLOAD</b> »»» <a href="https://jinyurl.com/2uNPNl">https://jinyurl.com/2uNPNl</a></b></p><br /><br />
|
6 |
-
<h2>FIFA 22 Mobile Features and Modes</h2>
|
7 |
-
<p>FIFA 22 Mobile is not just a scaled-down version of FIFA 22 for consoles or PC. It is a full-fledged soccer game that has its own unique features and modes that are designed specifically for mobile devices. Here are some of the most notable ones:</p>
|
8 |
-
<h3>HyperMotion Technology</h3>
|
9 |
-
<p>One of the biggest innovations in FIFA 22 Mobile is the <strong>HyperMotion Technology</strong>, which is a new system that uses machine learning to create more realistic and fluid gameplay and animations. HyperMotion Technology analyzes over 8.7 million frames of real-life soccer data and applies them to the game, resulting in more natural movements, reactions, and emotions of the players on the pitch.</p>
|
10 |
-
<p>HyperMotion Technology also works on mobile devices, thanks to the optimization and compression techniques used by EA Sports. However, you will need a compatible device that meets the minimum system requirements to enjoy this feature. According to EA Sports, you will need at least 4 GB of RAM, Android 8.0 or higher, and a device with a 64-bit processor. You can check if your device is compatible by going to the Google Play Store page of FIFA 22 Mobile and looking for the "HyperMotion Compatible" label.</p>
|
11 |
-
<h3>FIFA World Cup 2022 Mode</h3>
|
12 |
-
<p>Another exciting feature in FIFA 22 Mobile is the <strong>FIFA World Cup 2022 Mode</strong>, which lets you experience the thrill and excitement of the biggest soccer tournament in the world. FIFA World Cup 2022 Mode allows you to play with any of the 32 qualified nations or 15 non-qualified nations that are available in the game, and compete for the coveted trophy in Qatar.</p>
|
13 |
-
<p>To access FIFA World Cup 2022 Mode, you will need to go to the main menu of FIFA 22 Mobile and tap on the "World Cup" icon. You will then be able to choose your preferred nation and start your journey from the group stage to the knockout stage. You will also be able to earn rewards and challenges along the way, such as coins, packs, players, kits, badges, and more.</p>
|
14 |
-
<h3>Manager Mode</h3>
|
15 |
-
<p>If you prefer to take a more strategic approach to soccer, you might want to try <strong>Manager Mode</strong>, which is a mode that lets you manage your own dream team and adjust your tactics in real time or auto-play. Manager Mode gives you full control over your squad, transfers, formations, tactics, training, scouting, and more. You can also choose from different difficulty levels and leagues to suit your preferences and skills.</p>
|
16 |
-
<p>To start Manager Mode, you will need to go to the main menu of FIFA 22 Mobile and tap on the "Manager" icon. You will then be able to create your manager profile, choose your team name, logo, kit, stadium, and budget. You will also be able to customize your team's appearance, attributes, skills, and chemistry. You can then start playing matches against other teams or simulate them if you want to save time.</p>
|
17 |
-
|
57 |
-
<h3>UEFA Champions League, Europa League, and Europa Conference League</h3>
|
58 |
-
<p>If you want to compete against the best teams from club football's most prestigious tournaments, you can play in the <strong>UEFA Champions League</strong>, <strong>Europa League</strong>, or <strong>Europa Conference League</strong>. These are modes that let you participate in these competitions with your Ultimate Team or Manager Mode team, and try to win the coveted trophies and glory.</p>
|
59 |
-
<p>To play in these modes, you will need to go to the main menu of FIFA 22 Mobile and tap on the "UEFA" icon. You will then be able to choose which competition you want to enter and start playing matches against other teams from different groups and stages. You will also be able to earn special players and items from these modes, such as Team of the Group Stage (TOTGS), Team of the Knockout Stage (TOTKS), Man of the Match (MOTM), etc.</p>
|
60 |
-
<h3>Icons and Heroes</h3>
|
61 |
-
<p>If you want to build your Ultimate Team with over 100 soccer legends from different leagues and eras, you can get <strong>Icons</strong> and <strong>Heroes</strong>. These are special players that have high ratings and unique attributes that reflect their skills and achievements in real life. Icons are players that have retired from soccer, while Heroes are players that are still active but have made a significant impact on their clubs or leagues.</p>
|
62 |
-
<p>To get Icons and Heroes, you will need to go to the store or the transfer market of FIFA 22 Mobile and look for packs or cards that contain them. You can also earn them from completing certain objectives or challenges in the game. You can then use Icons and Heroes to boost your team's chemistry and performance by matching them with players from the same nation, league, or club. You can also upgrade your Icons and Heroes by completing their special challenges or objectives in the game.</p>
|
63 |
-
<h2>How to Download FIFA 22 Mobile on Android Devices</h2>
|
64 |
-
<p>Now that you know what FIFA 22 Mobile has to offer, you might be wondering how to download it on your Android device. Well, don't worry, because we have got you covered. Here are the requirements and compatibility, and the steps to download FIFA 22 Mobile on Android devices.</p>
|
65 |
-
<h3>Requirements and Compatibility</h3>
|
66 |
-
<p>Before you download FIFA 22 Mobile on your Android device, you need to make sure that your device meets the minimum specs and compatibility for the game. According to EA Sports, these are the minimum requirements for FIFA 22 Mobile on Android devices:</p>
|
67 |
-
<ul>
|
68 |
-
<li>4 GB of RAM or more</li>
|
69 |
-
<li>Android 8.0 or higher</li>
|
70 |
-
<li>A device with a 64-bit processor</li>
|
71 |
-
<li>At least 2 GB of free storage space</li>
|
72 |
-
</ul>
|
73 |
-
<p>You can check if your device meets these requirements by going to the settings menu of your device and looking for the information about your device model, OS version, RAM, processor, and storage space. You can also check if your device is compatible by going to the Google Play Store page of FIFA 22 Mobile and looking for the "HyperMotion Compatible" label.</p>
|
74 |
-
<h3>Steps to Download FIFA 22 Mobile on Android Devices</h3>
|
75 |
-
<p>If your device meets the requirements and compatibility for FIFA 22 Mobile, you can follow these steps to download it on your Android device:</p>
|
76 |
-
<ol>
|
77 |
-
<li>Go to the Google Play Store app on your device and search for "FIFA 22 Mobile" or use this link: <a href="">FIFA 22 Mobile - Apps on Google Play</a></li>
|
78 |
-
<li>Tap on the "Install" button and wait for the game to download and install on your device. The game size is about 1.5 GB, so make sure you have a stable internet connection and enough battery life.</li>
|
79 |
-
<li>Once the game is installed, tap on the "Open" button or find the game icon on your home screen or app drawer and tap on it to launch the game.</li>
|
80 |
-
<li>The game will ask you to sign in with your EA account or create a new one if you don't have one. You can also sign in with your Facebook or Google account if you prefer. Signing in with your EA account will allow you to sync your progress and data across different devices and platforms.</li>
|
81 |
-
<li>The game will then ask you to choose your preferred language, region, and difficulty level. You can also customize your controls, graphics, sound, and other settings in the game menu.</li>
|
82 |
-
<li>The game will then take you to the main menu, where you can access different features and modes of FIFA 22 Mobile. You can also watch tutorials and tips videos to learn more about the game.</li>
|
83 |
-
</ol>
|
84 |
-
<h2>Tips and Tricks for Playing FIFA 22 Mobile on Android Devices</h2>
|
85 |
-
<p>Now that you have downloaded FIFA 22 Mobile on your Android device, you might want some tips and tricks for playing it better and having more fun. Here are some of them:</p>
|
86 |
-
<h3>Use the Advanced Passing System to Create More Opportunities</h3>
|
87 |
-
<p>One of the most important skills in soccer is passing, as it allows you to create more opportunities for scoring or defending. In FIFA 22 Mobile, you can use the <strong>Advanced Passing System</strong>, which is a new feature that lets you use different types of passes, such as through balls, lobbed passes, driven passes, etc., depending on the situation and your preference.</p>
|
88 |
-
<p>To use the Advanced Passing System, you need to tap and hold on the pass button on the right side of the screen, and then swipe in any direction to choose the type of pass you want to make. The longer you hold and swipe, the more power and distance you will give to your pass. You can also use gestures such as double-tap or flick to perform quick passes or one-touch passes.</p>
|
89 |
-
<p>The Advanced Passing System can help you avoid interceptions and turnovers by passing smartly and accurately. You can also use it to create more chances for scoring by finding gaps in the defense or sending long balls to your strikers.</p>
|
90 |
-
<h3>Master the New Skill Moves to Outsmart Your Opponents</h3>
|
91 |
-
<p>Another way to improve your gameplay in FIFA 22 Mobile is to master the <strong>new skill moves</strong>, which are special moves that allow you to dribble past defenders, create space, or score goals. FIFA 22 Mobile has over 50 skill moves that you can perform with different players, depending on their skill rating and position.</p>
|
92 |
-
<p>To perform skill moves in FIFA 22 Mobile, you need to swipe on the left side of the screen, where the virtual joystick is located. You can swipe in different directions and combinations to perform different skill moves, such as roulette, rainbow flick, heel to heel, etc. You can also use gestures such as double-tap or flick to perform quick skill moves or feints.</p>
|
93 |
-
<p>Mastering skill moves can help you outsmart your opponents and gain an advantage on the pitch. You can also use skill moves to show off your style and flair, or to humiliate your rivals. You can practice skill moves in training mode or skill games mode, where you can learn how to perform them and when to use them.</p>
|
94 |
-
<h3>Use Emote Messages to Communicate with Your Opponents or Teammates</h3>
|
95 |
-
<p>One of the fun features in FIFA 22 Mobile is the <strong>Emote Messages</strong>, which are messages that you can send to your opponents or teammates during a match. Emote Messages allow you to express yourself, taunt your opponents, or celebrate your goals. You can also use Emote Messages to communicate with your teammates, such as asking for a pass, giving instructions, or praising their performance.</p>
|
96 |
-
<p>To use Emote Messages in FIFA 22 Mobile, you need to tap on the emote button on the top right corner of the screen during a match. You will then see a list of Emote Messages that you can choose from, such as "Nice one!", "Sorry!", "Wow!", etc. You can also customize your Emote Messages in the settings menu or buy new ones from the store.</p>
|
97 |
-
<p>Using Emote Messages can make your matches more fun and interactive. You can also use Emote Messages to influence your opponents' emotions and behavior, such as making them angry, nervous, or confident. However, you should be careful not to use Emote Messages in an abusive or offensive way, as this might result in a ban or a report from other players.</p>
|
98 |
-
<h2>Conclusion</h2>
|
99 |
-
<p>FIFA 22 Mobile is a great soccer game that you can play on your Android device. It has many features and modes that will keep you entertained and challenged for hours. You can also download it easily and play it smoothly on your device, as long as it meets the requirements and compatibility for the game.</p>
|
100 |
-
<p>If you are a soccer fan, you should definitely give FIFA 22 Mobile a try. You will not regret it. You will be able to enjoy the ultimate soccer game experience on your smartphone or tablet, with tons of features, modes, players, teams, stadiums, and more.</p>
|
101 |
-
<p>We hope this article has helped you learn more about FIFA 22 Mobile and how to download and play it on your Android device. If you have any questions or feedback, please feel free to leave a comment below. We would love to hear from you.</p>
|
102 |
-
<h2>FAQs</h2>
|
103 |
-
<p>Here are some of the frequently asked questions about FIFA 22 Mobile:</p>
|
104 |
-
<h4>Q: Is FIFA 22 Mobile free to play?</h4>
|
105 |
-
<p>A: Yes, FIFA 22 Mobile is free to play on Android devices. However, it does have some optional in-app purchases that can enhance your gameplay or unlock more content.</p>
|
106 |
-
<h4>Q: Can I play FIFA 22 Mobile offline?</h4>
|
107 |
-
<p>A: No, FIFA 22 Mobile requires an internet connection to play. You will need a stable Wi-Fi or mobile data connection to access all the features and modes of the game.</p>
|
108 |
-
<h4>Q: Can I play FIFA 22 Mobile with my friends?</h4>
|
109 |
-
<p>A: Yes, FIFA 22 Mobile has a multiplayer mode that allows you to play with your friends or other players online. You can invite your friends to join your team or challenge them to a friendly match. You can also join leagues or tournaments with other players from around the world.</p>
|
110 |
-
<h4>Q: How do I update FIFA 22 Mobile?</h4>
|
111 |
-
<p>A: FIFA 22 Mobile updates automatically when you launch the game, as long as you have an internet connection and enough storage space on your device. You can also check for updates manually by going to the Google Play Store app on your device and looking for FIFA 22 Mobile.</p>
|
112 |
-
<h4>Q: How do I contact EA Sports for support or feedback?</h4>
|
113 |
-
<p>A: If you have any issues or problems with FIFA 22 Mobile, you can contact EA Sports for support by going to the settings menu of the game and tapping on the "Help" icon. You will then be able to access the EA Help Center, where you can find answers to common questions, report a bug, request a refund, or contact an EA advisor. You can also provide feedback or suggestions for FIFA 22 Mobile by going to the settings menu of the game and tapping on the "Feedback" icon.</p>
|
114 |
-
spaces/1toTree/lora_test/ppdiffusers/schedulers/preconfig/preconfig_scheduling_euler_ancestral_discrete.py
DELETED
@@ -1,267 +0,0 @@
|
|
1 |
-
# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
|
2 |
-
# Copyright 2022 Katherine Crowson and The HuggingFace Team. All rights reserved.
|
3 |
-
#
|
4 |
-
# Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
-
# you may not use this file except in compliance with the License.
|
6 |
-
# You may obtain a copy of the License at
|
7 |
-
#
|
8 |
-
# http://www.apache.org/licenses/LICENSE-2.0
|
9 |
-
#
|
10 |
-
# Unless required by applicable law or agreed to in writing, software
|
11 |
-
# distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
-
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
-
# See the License for the specific language governing permissions and
|
14 |
-
# limitations under the License.
|
15 |
-
|
16 |
-
from dataclasses import dataclass
|
17 |
-
from typing import List, Optional, Tuple, Union
|
18 |
-
|
19 |
-
import numpy as np
|
20 |
-
import paddle
|
21 |
-
|
22 |
-
from ...configuration_utils import ConfigMixin, register_to_config
|
23 |
-
from ...utils import _COMPATIBLE_STABLE_DIFFUSION_SCHEDULERS, BaseOutput, logging
|
24 |
-
from ..scheduling_utils import SchedulerMixin
|
25 |
-
|
26 |
-
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
|
27 |
-
|
28 |
-
|
29 |
-
@dataclass
|
30 |
-
# Copied from diffusers.schedulers.scheduling_ddpm.DDPMSchedulerOutput with DDPM->EulerAncestralDiscrete
|
31 |
-
class PreconfigEulerAncestralDiscreteSchedulerOutput(BaseOutput):
|
32 |
-
"""
|
33 |
-
Output class for the scheduler's step function output.
|
34 |
-
|
35 |
-
Args:
|
36 |
-
prev_sample (`paddle.Tensor` of shape `(batch_size, num_channels, height, width)` for images):
|
37 |
-
Computed sample (x_{t-1}) of previous timestep. `prev_sample` should be used as next model input in the
|
38 |
-
denoising loop.
|
39 |
-
pred_original_sample (`paddle.Tensor` of shape `(batch_size, num_channels, height, width)` for images):
|
40 |
-
The predicted denoised sample (x_{0}) based on the model output from the current timestep.
|
41 |
-
`pred_original_sample` can be used to preview progress or for guidance.
|
42 |
-
"""
|
43 |
-
|
44 |
-
prev_sample: paddle.Tensor
|
45 |
-
pred_original_sample: Optional[paddle.Tensor] = None
|
46 |
-
|
47 |
-
|
48 |
-
class PreconfigEulerAncestralDiscreteScheduler(SchedulerMixin, ConfigMixin):
|
49 |
-
"""
|
50 |
-
Ancestral sampling with Euler method steps. Based on the original k-diffusion implementation by Katherine Crowson:
|
51 |
-
https://github.com/crowsonkb/k-diffusion/blob/481677d114f6ea445aa009cf5bd7a9cdee909e47/k_diffusion/sampling.py#L72
|
52 |
-
|
53 |
-
[`~ConfigMixin`] takes care of storing all config attributes that are passed in the scheduler's `__init__`
|
54 |
-
function, such as `num_train_timesteps`. They can be accessed via `scheduler.config.num_train_timesteps`.
|
55 |
-
[`SchedulerMixin`] provides general loading and saving functionality via the [`SchedulerMixin.save_pretrained`] and
|
56 |
-
[`~SchedulerMixin.from_pretrained`] functions.
|
57 |
-
|
58 |
-
Args:
|
59 |
-
num_train_timesteps (`int`): number of diffusion steps used to train the model.
|
60 |
-
beta_start (`float`): the starting `beta` value of inference.
|
61 |
-
beta_end (`float`): the final `beta` value.
|
62 |
-
beta_schedule (`str`):
|
63 |
-
the beta schedule, a mapping from a beta range to a sequence of betas for stepping the model. Choose from
|
64 |
-
`linear` or `scaled_linear`.
|
65 |
-
trained_betas (`np.ndarray`, optional):
|
66 |
-
option to pass an array of betas directly to the constructor to bypass `beta_start`, `beta_end` etc.
|
67 |
-
prediction_type (`str`, default `epsilon`, optional):
|
68 |
-
prediction type of the scheduler function, one of `epsilon` (predicting the noise of the diffusion
|
69 |
-
process), `sample` (directly predicting the noisy sample`) or `v_prediction` (see section 2.4
|
70 |
-
https://imagen.research.google/video/paper.pdf)
|
71 |
-
"""
|
72 |
-
|
73 |
-
_compatibles = _COMPATIBLE_STABLE_DIFFUSION_SCHEDULERS.copy()
|
74 |
-
order = 1
|
75 |
-
|
76 |
-
@register_to_config
|
77 |
-
def __init__(
|
78 |
-
self,
|
79 |
-
num_train_timesteps: int = 1000,
|
80 |
-
beta_start: float = 0.0001,
|
81 |
-
beta_end: float = 0.02,
|
82 |
-
beta_schedule: str = "linear",
|
83 |
-
trained_betas: Optional[Union[np.ndarray, List[float]]] = None,
|
84 |
-
prediction_type: str = "epsilon",
|
85 |
-
preconfig: bool = True,
|
86 |
-
):
|
87 |
-
if trained_betas is not None:
|
88 |
-
self.betas = paddle.to_tensor(trained_betas, dtype="float32")
|
89 |
-
elif beta_schedule == "linear":
|
90 |
-
self.betas = paddle.linspace(beta_start, beta_end, num_train_timesteps, dtype="float32")
|
91 |
-
elif beta_schedule == "scaled_linear":
|
92 |
-
# this schedule is very specific to the latent diffusion model.
|
93 |
-
self.betas = paddle.linspace(beta_start**0.5, beta_end**0.5, num_train_timesteps, dtype="float32") ** 2
|
94 |
-
else:
|
95 |
-
raise NotImplementedError(f"{beta_schedule} does is not implemented for {self.__class__}")
|
96 |
-
|
97 |
-
self.alphas = 1.0 - self.betas
|
98 |
-
self.alphas_cumprod = paddle.cumprod(self.alphas, 0)
|
99 |
-
|
100 |
-
sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)
|
101 |
-
sigmas = np.concatenate([sigmas[::-1], [0.0]]).astype(np.float32)
|
102 |
-
self.sigmas = paddle.to_tensor(sigmas)
|
103 |
-
|
104 |
-
# standard deviation of the initial noise distribution
|
105 |
-
self.init_noise_sigma = self.sigmas.max()
|
106 |
-
|
107 |
-
# setable values
|
108 |
-
self.num_inference_steps = None
|
109 |
-
timesteps = np.linspace(0, num_train_timesteps - 1, num_train_timesteps, dtype=float)[::-1].copy()
|
110 |
-
self.timesteps = paddle.to_tensor(timesteps, dtype="float32")
|
111 |
-
self.is_scale_input_called = False
|
112 |
-
self.preconfig = preconfig
|
113 |
-
|
114 |
-
def scale_model_input(
|
115 |
-
self, sample: paddle.Tensor, timestep: Union[float, paddle.Tensor], **kwargs
|
116 |
-
) -> paddle.Tensor:
|
117 |
-
"""
|
118 |
-
Scales the denoising model input by `(sigma**2 + 1) ** 0.5` to match the Euler algorithm.
|
119 |
-
|
120 |
-
Args:
|
121 |
-
sample (`paddle.Tensor`): input sample
|
122 |
-
timestep (`float` or `paddle.Tensor`): the current timestep in the diffusion chain
|
123 |
-
|
124 |
-
Returns:
|
125 |
-
`paddle.Tensor`: scaled input sample
|
126 |
-
"""
|
127 |
-
self.is_scale_input_called = True
|
128 |
-
if kwargs.get("step_index") is not None:
|
129 |
-
step_index = kwargs["step_index"]
|
130 |
-
else:
|
131 |
-
step_index = (self.timesteps == timestep).nonzero().item()
|
132 |
-
|
133 |
-
if not self.preconfig:
|
134 |
-
sigma = self.sigmas[step_index]
|
135 |
-
sample = sample / ((sigma**2 + 1) ** 0.5)
|
136 |
-
return sample
|
137 |
-
else:
|
138 |
-
return sample * self.latent_scales[step_index]
|
139 |
-
|
140 |
-
def set_timesteps(self, num_inference_steps: int):
|
141 |
-
"""
|
142 |
-
Sets the timesteps used for the diffusion chain. Supporting function to be run before inference.
|
143 |
-
|
144 |
-
Args:
|
145 |
-
num_inference_steps (`int`):
|
146 |
-
the number of diffusion steps used when generating samples with a pre-trained model.
|
147 |
-
"""
|
148 |
-
self.num_inference_steps = num_inference_steps
|
149 |
-
|
150 |
-
timesteps = np.linspace(0, self.config.num_train_timesteps - 1, num_inference_steps, dtype=float)[::-1].copy()
|
151 |
-
sigmas = np.array(((1 - self.alphas_cumprod) / self.alphas_cumprod) ** 0.5)
|
152 |
-
sigmas = np.interp(timesteps, np.arange(0, len(sigmas)), sigmas)
|
153 |
-
sigmas = np.concatenate([sigmas, [0.0]]).astype(np.float32)
|
154 |
-
self.sigmas = paddle.to_tensor(sigmas)
|
155 |
-
self.timesteps = paddle.to_tensor(timesteps, dtype="float32")
|
156 |
-
if self.preconfig:
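# Preconfig path: precompute the ancestral noise split (sigma_up, sigma_down) for every
# step, plus the 1 / sqrt(sigma**2 + 1) input scales, so that step() and
# scale_model_input() only need to index these tables via step_index at inference time.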
|
157 |
-
self.sigma_up = []
|
158 |
-
self.sigma_down = []
|
159 |
-
for step_index_i in range(len(self.timesteps)):
|
160 |
-
sigma_from = self.sigmas[step_index_i]
|
161 |
-
sigma_to = self.sigmas[step_index_i + 1]
|
162 |
-
sigma_up = (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5
|
163 |
-
sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5
|
164 |
-
self.sigma_up.append(sigma_up)
|
165 |
-
self.sigma_down.append(sigma_down)
|
166 |
-
self.latent_scales = 1 / ((self.sigmas**2 + 1) ** 0.5)
|
167 |
-
|
168 |
-
def step(
|
169 |
-
self,
|
170 |
-
model_output: paddle.Tensor,
|
171 |
-
timestep: Union[float, paddle.Tensor],
|
172 |
-
sample: paddle.Tensor,
|
173 |
-
generator: Optional[Union[paddle.Generator, List[paddle.Generator]]] = None,
|
174 |
-
return_dict: bool = True,
|
175 |
-
**kwargs
|
176 |
-
) -> Union[PreconfigEulerAncestralDiscreteSchedulerOutput, Tuple]:
|
177 |
-
"""
|
178 |
-
Predict the sample at the previous timestep by reversing the SDE. Core function to propagate the diffusion
|
179 |
-
process from the learned model outputs (most often the predicted noise).
|
180 |
-
|
181 |
-
Args:
|
182 |
-
model_output (`paddle.Tensor`): direct output from learned diffusion model.
|
183 |
-
timestep (`float`): current timestep in the diffusion chain.
|
184 |
-
sample (`paddle.Tensor`):
|
185 |
-
current instance of sample being created by diffusion process.
|
186 |
-
generator (`paddle.Generator`, optional): Random number generator.
|
187 |
-
return_dict (`bool`): option for returning tuple rather than PreconfigEulerAncestralDiscreteSchedulerOutput class
|
188 |
-
|
189 |
-
Returns:
|
190 |
-
[`~schedulers.scheduling_utils.PreconfigEulerAncestralDiscreteSchedulerOutput`] or `tuple`:
|
191 |
-
[`~schedulers.scheduling_utils.PreconfigEulerAncestralDiscreteSchedulerOutput`] if `return_dict` is True, otherwise
|
192 |
-
a `tuple`. When returning a tuple, the first element is the sample tensor.
|
193 |
-
|
194 |
-
"""
|
195 |
-
if not self.is_scale_input_called:
|
196 |
-
logger.warning(
|
197 |
-
"The `scale_model_input` function should be called before `step` to ensure correct denoising. "
|
198 |
-
"See `StableDiffusionPipeline` for a usage example."
|
199 |
-
)
|
200 |
-
if kwargs.get("return_pred_original_sample") is not None:
|
201 |
-
return_pred_original_sample = kwargs["return_pred_original_sample"]
|
202 |
-
else:
|
203 |
-
return_pred_original_sample = True
|
204 |
-
if kwargs.get("step_index") is not None:
|
205 |
-
step_index = kwargs["step_index"]
|
206 |
-
else:
|
207 |
-
step_index = (self.timesteps == timestep).nonzero().item()
|
208 |
-
sigma = self.sigmas[step_index]
|
209 |
-
if self.config.prediction_type == "epsilon" and not return_pred_original_sample:
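# Fast path: for epsilon prediction, (sample - pred_original_sample) / sigma equals
# model_output exactly, so the derivative is the raw noise prediction and x0 is skipped.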
|
210 |
-
derivative = model_output
|
211 |
-
pred_original_sample = None
|
212 |
-
else:
|
213 |
-
# 1. compute predicted original sample (x_0) from sigma-scaled predicted noise
|
214 |
-
if self.config.prediction_type == "epsilon":
|
215 |
-
pred_original_sample = sample - sigma * model_output
|
216 |
-
elif self.config.prediction_type == "v_prediction":
|
217 |
-
# * c_out + input * c_skip
|
218 |
-
pred_original_sample = model_output * (-sigma / (sigma**2 + 1) ** 0.5) + (sample / (sigma**2 + 1))
|
219 |
-
else:
|
220 |
-
raise ValueError(
|
221 |
-
f"prediction_type given as {self.config.prediction_type} must be one of `epsilon`, or `v_prediction`"
|
222 |
-
)
|
223 |
-
derivative = (sample - pred_original_sample) / sigma
|
224 |
-
if not self.preconfig:
|
225 |
-
sigma_from = self.sigmas[step_index]
|
226 |
-
sigma_to = self.sigmas[step_index + 1]
|
227 |
-
sigma_up = (sigma_to**2 * (sigma_from**2 - sigma_to**2) / sigma_from**2) ** 0.5
|
228 |
-
sigma_down = (sigma_to**2 - sigma_up**2) ** 0.5
|
229 |
-
else:
|
230 |
-
sigma_up = self.sigma_up[step_index]
|
231 |
-
sigma_down = self.sigma_down[step_index]
|
232 |
-
# 2. Convert to an ODE derivative
|
233 |
-
dt = sigma_down - sigma
|
234 |
-
prev_sample = sample + derivative * dt
|
235 |
-
noise = paddle.randn(model_output.shape, dtype=model_output.dtype, generator=generator)
|
236 |
-
prev_sample = prev_sample + noise * sigma_up
|
237 |
-
if not return_dict:
|
238 |
-
if not return_pred_original_sample:
|
239 |
-
return (prev_sample,)
|
240 |
-
else:
|
241 |
-
return (prev_sample, pred_original_sample)
|
242 |
-
|
243 |
-
return PreconfigEulerAncestralDiscreteSchedulerOutput(
|
244 |
-
prev_sample=prev_sample, pred_original_sample=pred_original_sample
|
245 |
-
)
|
246 |
-
|
247 |
-
def add_noise(
|
248 |
-
self,
|
249 |
-
original_samples: paddle.Tensor,
|
250 |
-
noise: paddle.Tensor,
|
251 |
-
timesteps: paddle.Tensor,
|
252 |
-
) -> paddle.Tensor:
|
253 |
-
# Make sure sigmas and timesteps have the same dtype as original_samples
|
254 |
-
self.sigmas = self.sigmas.cast(original_samples.dtype)
|
255 |
-
|
256 |
-
schedule_timesteps = self.timesteps
|
257 |
-
step_indices = [(schedule_timesteps == t).nonzero().item() for t in timesteps]
|
258 |
-
|
259 |
-
sigma = self.sigmas[step_indices].flatten()
|
260 |
-
while len(sigma.shape) < len(original_samples.shape):
|
261 |
-
sigma = sigma.unsqueeze(-1)
|
262 |
-
|
263 |
-
noisy_samples = original_samples + noise * sigma
|
264 |
-
return noisy_samples
|
265 |
-
|
266 |
-
def __len__(self):
|
267 |
-
return self.config.num_train_timesteps
|
spaces/1toTree/lora_test/ppdiffusers/training_utils.py
DELETED
@@ -1,152 +0,0 @@
-# Copyright (c) 2022 PaddlePaddle Authors. All Rights Reserved.
-# Copyright 2022 The HuggingFace Team. All rights reserved.
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#     http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-
-import contextlib
-import copy
-import os
-import random
-
-import numpy as np
-import paddle
-
-from .utils import logging
-
-logger = logging.get_logger(__name__)
-
-
-def enable_full_determinism(seed: int):
-    """
-    Helper function for reproducible behavior during distributed training.
-    """
-    # set seed first
-    set_seed(seed)
-
-    # Enable Paddle deterministic mode. This potentially requires either the environment
-    # variable 'CUDA_LAUNCH_BLOCKING' or 'CUBLAS_WORKSPACE_CONFIG' to be set,
-    # depending on the CUDA version, so we set them both here
-    os.environ["CUDA_LAUNCH_BLOCKING"] = "1"
-    os.environ["CUBLAS_WORKSPACE_CONFIG"] = ":16:8"
-    os.environ["FLAGS_cudnn_deterministic"] = "True"
-    os.environ["FLAGS_benchmark"] = "True"
-
-
-def set_seed(seed: int = None):
-    """
-    Args:
-    Helper function for reproducible behavior to set the seed in `random`, `numpy`, `paddle`.
-        seed (`int`): The seed to set.
-    """
-    if seed is not None:
-        random.seed(seed)
-        np.random.seed(seed)
-        paddle.seed(seed)
-
-
-class EMAModel:
-    """
-    Exponential Moving Average of models weights
-    """
-
-    def __init__(self, model, update_after_step=0, inv_gamma=1.0, power=2 / 3, min_value=0.0, max_value=0.9999):
-        """
-        @crowsonkb's notes on EMA Warmup:
-        If gamma=1 and power=1, implements a simple average. gamma=1, power=2/3 are good values for models you plan
-        to train for a million or more steps (reaches decay factor 0.999 at 31.6K steps, 0.9999 at 1M steps),
-        gamma=1, power=3/4 for models you plan to train for less (reaches decay factor 0.999 at 10K steps, 0.9999
-        at 215.4k steps).
-        Args:
-            inv_gamma (float): Inverse multiplicative factor of EMA warmup. Default: 1.
-            power (float): Exponential factor of EMA warmup. Default: 2/3.
-            min_value (float): The minimum EMA decay rate. Default: 0.
-        """
-
-        self.averaged_model = copy.deepcopy(model)
-        self.averaged_model.eval()
-        for params in self.averaged_model.parameters():
-            params.stop_gradient = True
-
-        self.update_after_step = update_after_step
-        self.inv_gamma = inv_gamma
-        self.power = power
-        self.min_value = min_value
-        self.max_value = max_value
-
-        self.decay = 0.0
-        self.optimization_step = 0
-
-    def get_decay(self, optimization_step):
-        """
-        Compute the decay factor for the exponential moving average.
-        """
-        step = max(0, optimization_step - self.update_after_step - 1)
-        value = 1 - (1 + step / self.inv_gamma) ** -self.power
-
-        if step <= 0:
-            return 0.0
-
-        return max(self.min_value, min(value, self.max_value))
-
-    @paddle.no_grad()
-    def step(self, new_model):
-        ema_state_dict = {}
-        ema_params = self.averaged_model.state_dict()
-
-        self.decay = self.get_decay(self.optimization_step)
-
-        for key, param in new_model.named_parameters():
-            if isinstance(param, dict):
-                continue
-            try:
-                ema_param = ema_params[key]
-            except KeyError:
-                ema_param = param.cast("float32").clone() if param.ndim == 1 else copy.deepcopy(param)
-                ema_params[key] = ema_param
-
-            if param.stop_gradient:
-                ema_params[key].copy_(param.cast(ema_param.dtype), True)
-                ema_param = ema_params[key]
-            else:
-                ema_param.scale_(self.decay)
-                ema_param.add_(param.cast(ema_param.dtype) * (1 - self.decay))
-
-            ema_state_dict[key] = ema_param
-
-        for key, param in new_model.named_buffers():
-            ema_state_dict[key] = param
-
-        self.averaged_model.load_dict(ema_state_dict)
-        self.optimization_step += 1
-
-
-@contextlib.contextmanager
-def main_process_first(desc="work"):
-    if paddle.distributed.get_world_size() > 1:
-        rank = paddle.distributed.get_rank()
-        is_main_process = rank == 0
-        main_process_desc = "main local process"
-
-        try:
-            if not is_main_process:
-                # tell all replicas to wait
-                logger.debug(f"{rank}: waiting for the {main_process_desc} to perform {desc}")
-                paddle.distributed.barrier()
-            yield
-        finally:
-            if is_main_process:
-                # the wait is over
-                logger.debug(f"{rank}: {main_process_desc} completed {desc}, releasing all replicas")
-                paddle.distributed.barrier()
-    else:
-        yield
spaces/2ndelement/voicevox/ui_template/ui.html
DELETED
@@ -1,120 +0,0 @@
-<!DOCTYPE html>
-<html lang="ja">
-  <head>
-    <meta charset="utf-8" />
-    <title>VOICEVOX Engine 設定</title>
-    <link
-      rel="shortcut icon"
-      href="https://voicevox.hiroshiba.jp/favicon-32x32.png"
-    />
-
-    <link
-      href="https://cdn.jsdelivr.net/npm/[email protected]/dist/css/bootstrap.min.css"
-      rel="stylesheet"
-      integrity="sha384-EVSTQN3/azprG1Anm3QDgpJLIm9Nao0Yz1ztcQTwFspd3yD65VohhpuuCOmLASjC"
-      crossorigin="anonymous"
-    />
-    <script
-      src="https://cdn.jsdelivr.net/npm/[email protected]/dist/js/bootstrap.bundle.min.js"
-      integrity="sha384-MrcW6ZMFYlzcLA8Nl+NtUVF0sA7MsXsP1UyJoMp4YLEuNSfAP+JcXn/tWtIaxVXM"
-      crossorigin="anonymous"
-    ></script>
-  </head>
-
-  <body>
-    <div class="container p-3">
-      <form method="post">
-        <div class="alert alert-warning" role="alert">
-          設定の変更の更新にはエンジンの再起動が必要です。
-        </div>
-
-        <div class="mb-3">
-          <label class="form-label">CORS Policy Mode</label>
-          <select
-            class="form-select"
-            aria-label="cors_policy_mode"
-            name="cors_policy_mode"
-          >
-            <option selected value="{{ cors_policy_mode }}">
-              現在値: {{ cors_policy_mode }}
-            </option>
-            <option value="localapps">localapps</option>
-            <option value="all">all</option>
-          </select>
-          <div class="form-text">
-            <p class="mb-1">
-              allまたはlocalappsを指定。allはすべてを許可します。
-            </p>
-            <p class="mb-1">
-              localappsはオリジン間リソース共有ポリシーを、app://.とlocalhost関連に限定します。
-            </p>
-            <p>
-              その他のオリジンはallow_originオプションで追加できます。デフォルトはlocalapps。
-            </p>
-          </div>
-        </div>

-        <div class="mb-3">
-          <label class="form-label">Allow Origin</label>
-          <input
-            class="form-control"
-            type="text"
-            name="allow_origin"
-            value="{{ allow_origin }}"
-          />
-          <div class="form-text">
-            許可するオリジンを指定します。複数指定する場合は、直後にスペースで区切って追加できます。
-          </div>
-        </div>
-
-        <div
-          class="modal fade"
-          id="submitModal"
-          tabindex="-1"
-          aria-labelledby="submitModalLabel"
-          aria-hidden="true"
-        >
-          <div class="modal-dialog">
-            <div class="modal-content">
-              <div class="modal-header">
-                <h5 class="modal-title" id="submitModalLabel">
-                  設定の保存
-                </h5>
-                <button
-                  type="button"
-                  class="btn-close"
-                  data-bs-dismiss="modal"
-                  aria-label="Close"
-                ></button>
-              </div>
-              <div class="modal-body">
-                設定を保存します。よろしいですか?
-              </div>
-              <div class="modal-footer">
-                <button
-                  type="button"
-                  class="btn btn-secondary"
-                  data-bs-dismiss="modal"
-                >
-                  キャンセル
-                </button>
-                <button type="submit" class="btn btn-primary">
-                  保存
-                </button>
-              </div>
-            </div>
-          </div>
-        </div>
-
-        <button
-          type="button"
-          class="btn btn-primary"
-          data-bs-toggle="modal"
-          data-bs-target="#submitModal"
-        >
-          保存
-        </button>
-      </form>
-    </div>
-  </body>
-</html>
spaces/801artistry/RVC801/infer/modules/train/extract/extract_f0_rmvpe_dml.py
DELETED
@@ -1,139 +0,0 @@
-import os
-import sys
-import traceback
-
-import parselmouth
-
-now_dir = os.getcwd()
-sys.path.append(now_dir)
-import logging
-
-import numpy as np
-import pyworld
-
-from infer.lib.audio import load_audio
-
-logging.getLogger("numba").setLevel(logging.WARNING)
-
-exp_dir = sys.argv[1]
-import torch_directml
-
-device = torch_directml.device(torch_directml.default_device())
-f = open("%s/extract_f0_feature.log" % exp_dir, "a+")
-
-
-def printt(strr):
-    print(strr)
-    f.write("%s\n" % strr)
-    f.flush()
-
-
-class FeatureInput(object):
-    def __init__(self, samplerate=16000, hop_size=160):
-        self.fs = samplerate
-        self.hop = hop_size
-
-        self.f0_bin = 256
-        self.f0_max = 1100.0
-        self.f0_min = 50.0
-        self.f0_mel_min = 1127 * np.log(1 + self.f0_min / 700)
-        self.f0_mel_max = 1127 * np.log(1 + self.f0_max / 700)
-
-    def compute_f0(self, path, f0_method):
-        x = load_audio(path, self.fs)
-        # p_len = x.shape[0] // self.hop
-        if f0_method == "rmvpe":
-            if hasattr(self, "model_rmvpe") == False:
-                from infer.lib.rmvpe import RMVPE
-
-                print("Loading rmvpe model")
-                self.model_rmvpe = RMVPE(
-                    "assets/rmvpe/rmvpe.pt", is_half=False, device=device
-                )
-            f0 = self.model_rmvpe.infer_from_audio(x, thred=0.03)
-        return f0
-
-    def coarse_f0(self, f0):
-        f0_mel = 1127 * np.log(1 + f0 / 700)
-        f0_mel[f0_mel > 0] = (f0_mel[f0_mel > 0] - self.f0_mel_min) * (
-            self.f0_bin - 2
-        ) / (self.f0_mel_max - self.f0_mel_min) + 1
-
-        # use 0 or 1
-        f0_mel[f0_mel <= 1] = 1
-        f0_mel[f0_mel > self.f0_bin - 1] = self.f0_bin - 1
-        f0_coarse = np.rint(f0_mel).astype(int)
-        assert f0_coarse.max() <= 255 and f0_coarse.min() >= 1, (
-            f0_coarse.max(),
-            f0_coarse.min(),
-        )
-        return f0_coarse
-
-    def go(self, paths, f0_method):
-        if len(paths) == 0:
-            printt("no-f0-todo")
-        else:
-            printt("todo-f0-%s" % len(paths))
-            n = max(len(paths) // 5, 1)  # 每个进程最多打印5条
-            for idx, (inp_path, opt_path1, opt_path2) in enumerate(paths):
-                try:
-                    if idx % n == 0:
-                        printt("f0ing,now-%s,all-%s,-%s" % (idx, len(paths), inp_path))
-                    if (
-                        os.path.exists(opt_path1 + ".npy") == True
-                        and os.path.exists(opt_path2 + ".npy") == True
-                    ):
-                        continue
-                    featur_pit = self.compute_f0(inp_path, f0_method)
-                    np.save(
-                        opt_path2,
-                        featur_pit,
-                        allow_pickle=False,
-                    )  # nsf
-                    coarse_pit = self.coarse_f0(featur_pit)
-                    np.save(
-                        opt_path1,
-                        coarse_pit,
-                        allow_pickle=False,
-                    )  # ori
-                except:
-                    printt("f0fail-%s-%s-%s" % (idx, inp_path, traceback.format_exc()))
-
-
-if __name__ == "__main__":
-    # exp_dir=r"E:\codes\py39\dataset\mi-test"
-    # n_p=16
-    # f = open("%s/log_extract_f0.log"%exp_dir, "w")
-    printt(sys.argv)
-    featureInput = FeatureInput()
-    paths = []
-    inp_root = "%s/1_16k_wavs" % (exp_dir)
-    opt_root1 = "%s/2a_f0" % (exp_dir)
-    opt_root2 = "%s/2b-f0nsf" % (exp_dir)
-
-    os.makedirs(opt_root1, exist_ok=True)
-    os.makedirs(opt_root2, exist_ok=True)
-    for name in sorted(list(os.listdir(inp_root))):
-        inp_path = "%s/%s" % (inp_root, name)
-        if "spec" in inp_path:
-            continue
-        opt_path1 = "%s/%s" % (opt_root1, name)
-        opt_path2 = "%s/%s" % (opt_root2, name)
-        paths.append([inp_path, opt_path1, opt_path2])
-    try:
-        featureInput.go(paths, "rmvpe")
-    except:
-        printt("f0_all_fail-%s" % (traceback.format_exc()))
-    # ps = []
-    # for i in range(n_p):
-    #     p = Process(
-    #         target=featureInput.go,
-    #         args=(
-    #             paths[i::n_p],
-    #             f0method,
-    #         ),
-    #     )
-    #     ps.append(p)
-    #     p.start()
-    # for i in range(n_p):
-    #     ps[i].join()
spaces/AB-TW/team-ai/memories.py
DELETED
@@ -1,61 +0,0 @@
-from typing import Any, Dict, List, Optional
-from langchain.memory.chat_memory import BaseChatMemory
-from langchain.schema import BaseMessage, HumanMessage, AIMessage, SystemMessage, ChatMessage
-
-
-def get_buffer_string(
-    messages: List[BaseMessage], human_prefix: str = "Human", ai_prefix: str = "AI"
-) -> str:
-    """Get buffer string of messages."""
-    string_messages = []
-    for m in messages:
-        if isinstance(m, HumanMessage):
-            print("HumanMessage: " + m.content)
-            role = human_prefix + ": "
-        elif isinstance(m, AIMessage):
-            print("AIMessage" + m.content)
-            role = ""
-        elif isinstance(m, SystemMessage):
-            print("SystemMessage")
-            role = "System: "
-        elif isinstance(m, ChatMessage):
-            print("ChatMessage")
-            role = m.role + ": "
-        else:
-            raise ValueError(f"Got unsupported message type: {m}")
-
-        string_messages.append(f"{role + m.content}")
-
-    return "\n".join(string_messages)
-
-
-class HumenFeedbackBufferMemory(BaseChatMemory):
-    """Buffer for storing conversation memory."""
-
-    human_prefix: str = "Human"
-    ai_prefix: str = "AI"
-    memory_key: str = "history"  #: :meta private:
-
-    @property
-    def buffer(self) -> Any:
-        """String buffer of memory."""
-        if self.return_messages:
-            return self.chat_memory.messages
-        else:
-            return get_buffer_string(
-                self.chat_memory.messages,
-                human_prefix=self.human_prefix,
-                ai_prefix=self.ai_prefix,
-            )
-
-    @property
-    def memory_variables(self) -> List[str]:
-        """Will always return list of memory variables.
-
-        :meta private:
-        """
-        return [self.memory_key]
-
-    def load_memory_variables(self, inputs: Dict[str, Any]) -> Dict[str, Any]:
-        """Return history buffer."""
-        return {self.memory_key: self.buffer}
spaces/AIZ2H/04-Gradio-SOTA-Seq2Seq-AutoQA/qasrl_model_pipeline.py
DELETED
@@ -1,183 +0,0 @@
-from typing import Optional
-import json
-from argparse import Namespace
-from pathlib import Path
-from transformers import Text2TextGenerationPipeline, AutoModelForSeq2SeqLM, AutoTokenizer
-
-def get_markers_for_model(is_t5_model: bool) -> Namespace:
-    special_tokens_constants = Namespace()
-    if is_t5_model:
-        # T5 model have 100 special tokens by default
-        special_tokens_constants.separator_input_question_predicate = "<extra_id_1>"
-        special_tokens_constants.separator_output_answers = "<extra_id_3>"
-        special_tokens_constants.separator_output_questions = "<extra_id_5>"  # if using only questions
-        special_tokens_constants.separator_output_question_answer = "<extra_id_7>"
-        special_tokens_constants.separator_output_pairs = "<extra_id_9>"
-        special_tokens_constants.predicate_generic_marker = "<extra_id_10>"
-        special_tokens_constants.predicate_verb_marker = "<extra_id_11>"
-        special_tokens_constants.predicate_nominalization_marker = "<extra_id_12>"
-
-    else:
-        special_tokens_constants.separator_input_question_predicate = "<question_predicate_sep>"
-        special_tokens_constants.separator_output_answers = "<answers_sep>"
-        special_tokens_constants.separator_output_questions = "<question_sep>"  # if using only questions
-        special_tokens_constants.separator_output_question_answer = "<question_answer_sep>"
-        special_tokens_constants.separator_output_pairs = "<qa_pairs_sep>"
-        special_tokens_constants.predicate_generic_marker = "<predicate_marker>"
-        special_tokens_constants.predicate_verb_marker = "<verbal_predicate_marker>"
-        special_tokens_constants.predicate_nominalization_marker = "<nominalization_predicate_marker>"
-    return special_tokens_constants
-
-def load_trained_model(name_or_path):
-    import huggingface_hub as HFhub
-    tokenizer = AutoTokenizer.from_pretrained(name_or_path)
-    model = AutoModelForSeq2SeqLM.from_pretrained(name_or_path)
-    # load preprocessing_kwargs from the model repo on HF hub, or from the local model directory
-    kwargs_filename = None
-    if name_or_path.startswith("kleinay/"):  # and 'preprocessing_kwargs.json' in HFhub.list_repo_files(name_or_path): # the supported version of HFhub doesn't support list_repo_files
-        kwargs_filename = HFhub.hf_hub_download(repo_id=name_or_path, filename="preprocessing_kwargs.json")
-    elif Path(name_or_path).is_dir() and (Path(name_or_path) / "experiment_kwargs.json").exists():
-        kwargs_filename = Path(name_or_path) / "experiment_kwargs.json"
-
-    if kwargs_filename:
-        preprocessing_kwargs = json.load(open(kwargs_filename))
-        # integrate into model.config (for decoding args, e.g. "num_beams"), and save also as standalone object for preprocessing
-        model.config.preprocessing_kwargs = Namespace(**preprocessing_kwargs)
-        model.config.update(preprocessing_kwargs)
-    return model, tokenizer
-
-
-class QASRL_Pipeline(Text2TextGenerationPipeline):
-    def __init__(self, model_repo: str, **kwargs):
-        model, tokenizer = load_trained_model(model_repo)
-        super().__init__(model, tokenizer, framework="pt")
-        self.is_t5_model = "t5" in model.config.model_type
-        self.special_tokens = get_markers_for_model(self.is_t5_model)
-        self.data_args = model.config.preprocessing_kwargs
-        # backward compatibility - default keyword values implemeted in `run_summarization`, thus not saved in `preprocessing_kwargs`
-        if "predicate_marker_type" not in vars(self.data_args):
-            self.data_args.predicate_marker_type = "generic"
-        if "use_bilateral_predicate_marker" not in vars(self.data_args):
-            self.data_args.use_bilateral_predicate_marker = True
-        if "append_verb_form" not in vars(self.data_args):
-            self.data_args.append_verb_form = True
-        self._update_config(**kwargs)
-
-    def _update_config(self, **kwargs):
-        " Update self.model.config with initialization parameters and necessary defaults. "
-        # set default values that will always override model.config, but can overriden by __init__ kwargs
-        kwargs["max_length"] = kwargs.get("max_length", 80)
-        # override model.config with kwargs
-        for k,v in kwargs.items():
-            self.model.config.__dict__[k] = v
-
-    def _sanitize_parameters(self, **kwargs):
-        preprocess_kwargs, forward_kwargs, postprocess_kwargs = {}, {}, {}
-        if "predicate_marker" in kwargs:
-            preprocess_kwargs["predicate_marker"] = kwargs["predicate_marker"]
-        if "predicate_type" in kwargs:
-            preprocess_kwargs["predicate_type"] = kwargs["predicate_type"]
-        if "verb_form" in kwargs:
-            preprocess_kwargs["verb_form"] = kwargs["verb_form"]
-        return preprocess_kwargs, forward_kwargs, postprocess_kwargs
-
-    def preprocess(self, inputs, predicate_marker="<predicate>", predicate_type=None, verb_form=None):
-        # Here, inputs is string or list of strings; apply string postprocessing
-        if isinstance(inputs, str):
-            processed_inputs = self._preprocess_string(inputs, predicate_marker, predicate_type, verb_form)
-        elif hasattr(inputs, "__iter__"):
-            processed_inputs = [self._preprocess_string(s, predicate_marker, predicate_type, verb_form) for s in inputs]
-        else:
-            raise ValueError("inputs must be str or Iterable[str]")
-        # Now pass to super.preprocess for tokenization
-        return super().preprocess(processed_inputs)
-
-    def _preprocess_string(self, seq: str, predicate_marker: str, predicate_type: Optional[str], verb_form: Optional[str]) -> str:
-        sent_tokens = seq.split(" ")
-        assert predicate_marker in sent_tokens, f"Input sentence must include a predicate-marker token ('{predicate_marker}') before the target predicate word"
-        predicate_idx = sent_tokens.index(predicate_marker)
-        sent_tokens.remove(predicate_marker)
-        sentence_before_predicate = " ".join([sent_tokens[i] for i in range(predicate_idx)])
-        predicate = sent_tokens[predicate_idx]
-        sentence_after_predicate = " ".join([sent_tokens[i] for i in range(predicate_idx+1, len(sent_tokens))])
-
-        if self.data_args.predicate_marker_type == "generic":
-            predicate_marker = self.special_tokens.predicate_generic_marker
-        # In case we want special marker for each predicate type: """
-        elif self.data_args.predicate_marker_type == "pred_type":
-            assert predicate_type is not None, "For this model, you must provide the `predicate_type` either when initializing QASRL_Pipeline(...) or when applying __call__(...) on it"
-            assert predicate_type in ("verbal", "nominal"), f"`predicate_type` must be either 'verbal' or 'nominal'; got '{predicate_type}'"
-            predicate_marker = {"verbal": self.special_tokens.predicate_verb_marker ,
-                                "nominal": self.special_tokens.predicate_nominalization_marker
-                                }[predicate_type]
-
-        if self.data_args.use_bilateral_predicate_marker:
-            seq = f"{sentence_before_predicate} {predicate_marker} {predicate} {predicate_marker} {sentence_after_predicate}"
-        else:
-            seq = f"{sentence_before_predicate} {predicate_marker} {predicate} {sentence_after_predicate}"
-
-        # embed also verb_form
-        if self.data_args.append_verb_form and verb_form is None:
-            raise ValueError(f"For this model, you must provide the `verb_form` of the predicate when applying __call__(...)")
-        elif self.data_args.append_verb_form:
-            seq = f"{seq} {self.special_tokens.separator_input_question_predicate} {verb_form} "
-        else:
-            seq = f"{seq} "
-
-        # append source prefix (for t5 models)
-        prefix = self._get_source_prefix(predicate_type)
-
-        return prefix + seq
-
-    def _get_source_prefix(self, predicate_type: Optional[str]):
-        if not self.is_t5_model or self.data_args.source_prefix is None:
-            return ''
-        if not self.data_args.source_prefix.startswith("<"):  # Regular prefix - not dependent on input row x
-            return self.data_args.source_prefix
-        if self.data_args.source_prefix == "<predicate-type>":
-            if predicate_type is None:
-                raise ValueError("source_prefix is '<predicate-type>' but input no `predicate_type`.")
-            else:
-                return f"Generate QAs for {predicate_type} QASRL: "
-
-    def _forward(self, *args, **kwargs):
-        outputs = super()._forward(*args, **kwargs)
-        return outputs
-
-
-    def postprocess(self, model_outputs):
-        output_seq = self.tokenizer.decode(
-            model_outputs["output_ids"].squeeze(),
-            skip_special_tokens=False,
-            clean_up_tokenization_spaces=False,
-        )
-        output_seq = output_seq.strip(self.tokenizer.pad_token).strip(self.tokenizer.eos_token).strip()
-        qa_subseqs = output_seq.split(self.special_tokens.separator_output_pairs)
-        qas = [self._postrocess_qa(qa_subseq) for qa_subseq in qa_subseqs]
-        return {"generated_text": output_seq,
-                "QAs": qas}
-
-    def _postrocess_qa(self, seq: str) -> str:
-        # split question and answers
-        if self.special_tokens.separator_output_question_answer in seq:
-            question, answer = seq.split(self.special_tokens.separator_output_question_answer)[:2]
-        else:
-            print("invalid format: no separator between question and answer found...")
-            return None
-            # question, answer = seq, ''  # Or: backoff to only question
-        # skip "_" slots in questions
-        question = ' '.join(t for t in question.split(' ') if t != '_')
-        answers = [a.strip() for a in answer.split(self.special_tokens.separator_output_answers)]
-        return {"question": question, "answers": answers}
-
-
-if __name__ == "__main__":
-    pipe = QASRL_Pipeline("kleinay/qanom-seq2seq-model-baseline")
-    res1 = pipe("The student was interested in Luke 's <predicate> research about sea animals .", verb_form="research", predicate_type="nominal")
-    res2 = pipe(["The doctor was interested in Luke 's <predicate> treatment .",
-                 "The Veterinary student was interested in Luke 's <predicate> treatment of sea animals ."], verb_form="treat", predicate_type="nominal", num_beams=10)
-    res3 = pipe("A number of professions have <predicate> developed that specialize in the treatment of mental disorders .", verb_form="develop", predicate_type="verbal")
-    print(res1)
-    print(res2)
-    print(res3)
-
spaces/ATang0729/Forecast4Muses/Model/Model6/Model6_2_ProfileRecogition/mmpretrain/configs/_base_/models/__init__.py
DELETED
File without changes
spaces/AchyuthGamer/OpenGPT/g4f/Provider/Providers/deprecated/Equing.py
DELETED
@@ -1,81 +0,0 @@
-from __future__ import annotations
-
-import json
-from abc import ABC, abstractmethod
-
-import requests
-
-from ...typing import Any, CreateResult
-from ..base_provider import BaseProvider
-
-
-class Equing(BaseProvider):
-    url: str = 'https://next.eqing.tech/'
-    working = False
-    supports_stream = True
-    supports_gpt_35_turbo = True
-    supports_gpt_4 = False
-
-    @staticmethod
-    @abstractmethod
-    def create_completion(
-        model: str,
-        messages: list[dict[str, str]],
-        stream: bool, **kwargs: Any) -> CreateResult:
-
-        headers = {
-            'authority'         : 'next.eqing.tech',
-            'accept'            : 'text/event-stream',
-            'accept-language'   : 'en,fr-FR;q=0.9,fr;q=0.8,es-ES;q=0.7,es;q=0.6,en-US;q=0.5,am;q=0.4,de;q=0.3',
-            'cache-control'     : 'no-cache',
-            'content-type'      : 'application/json',
-            'origin'            : 'https://next.eqing.tech',
-            'plugins'           : '0',
-            'pragma'            : 'no-cache',
-            'referer'           : 'https://next.eqing.tech/',
-            'sec-ch-ua'         : '"Not/A)Brand";v="99", "Google Chrome";v="115", "Chromium";v="115"',
-            'sec-ch-ua-mobile'  : '?0',
-            'sec-ch-ua-platform': '"macOS"',
-            'sec-fetch-dest'    : 'empty',
-            'sec-fetch-mode'    : 'cors',
-            'sec-fetch-site'    : 'same-origin',
-            'user-agent'        : 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/115.0.0.0 Safari/537.36',
-            'usesearch'         : 'false',
-            'x-requested-with'  : 'XMLHttpRequest'
-        }
-
-        json_data = {
-            'messages'          : messages,
-            'stream'            : stream,
-            'model'             : model,
-            'temperature'       : kwargs.get('temperature', 0.5),
-            'presence_penalty'  : kwargs.get('presence_penalty', 0),
-            'frequency_penalty' : kwargs.get('frequency_penalty', 0),
-            'top_p'             : kwargs.get('top_p', 1),
-        }
-
-        response = requests.post('https://next.eqing.tech/api/openai/v1/chat/completions',
-            headers=headers, json=json_data, stream=stream)
-
-        if not stream:
-            yield response.json()["choices"][0]["message"]["content"]
-            return
-
-        for line in response.iter_content(chunk_size=1024):
-            if line:
-                if b'content' in line:
-                    line_json = json.loads(line.decode('utf-8').split('data: ')[1])
-                    token = line_json['choices'][0]['delta'].get('content')
-                    if token:
-                        yield token
-
-    @classmethod
-    @property
-    def params(cls):
-        params = [
-            ("model", "str"),
-            ("messages", "list[dict[str, str]]"),
-            ("stream", "bool"),
-        ]
-        param = ", ".join([": ".join(p) for p in params])
-        return f"g4f.provider.{cls.__name__} supports: ({param})"
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/hiddenedit/Factory.d.ts
DELETED
@@ -1,6 +0,0 @@
-import HiddenEdit from './HiddenEdit';
-
-export default function (
-    textObject: Phaser.GameObjects.GameObject,
-    config?: HiddenEdit.IConfig
-): HiddenEdit;
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/holygrail/methods/GetAddChildConfig.js
DELETED
@@ -1,70 +0,0 @@
-const GetValue = Phaser.Utils.Objects.GetValue;
-
-var GetAddChildConfig = function (config, key, defaultValues) {
-    var proportion = GetValue(config, `proportion.${key}`, defaultValues.proportion);
-    var align = GetValue(config, `align.${key}`, 'center');
-    var padding = GetValue(config, `space.${key}`, undefined);
-    if ((typeof (padding) === 'number') && defaultValues.paddingKey) {
-        var paddingNum = padding;
-        padding = {};
-        padding[defaultValues.paddingKey] = paddingNum;
-    }
-    var expand = GetValue(config, `expand.${key}`, true);
-
-    return {
-        proportion: proportion,
-        align: align,
-        padding: padding,
-        expand: expand,
-    }
-}
-
-var GetAddHeaderConfig = function (config) {
-    return GetAddChildConfig(config, 'header', {
-        proportion: 0,
-        paddingKey: 'bottom'
-    })
-}
-
-var GetAddLeftSideConfig = function (config) {
-    return GetAddChildConfig(config, 'leftSide', {
-        proportion: 0,
-        paddingKey: 'right'
-    })
-}
-
-var GetAddContentConfig = function (config) {
-    return GetAddChildConfig(config, 'content', {
-        proportion: 1
-    })
-}
-
-var GetAddRightSideConfig = function (config) {
-    return GetAddChildConfig(config, 'rightSide', {
-        proportion: 0,
-        paddingKey: 'left'
-    })
-}
-
-var GetAddFooterConfig = function (config) {
-    return GetAddChildConfig(config, 'footer', {
-        proportion: 0,
-        paddingKey: 'top'
-    })
-}
-
-var GetAddContainerConfig = function (config) {
-    return {
-        proportion: 1,
-        align: 'center',
-        padding: 0,
-        expand: true,
-    }
-}
-
-export {
-    GetAddHeaderConfig,
-    GetAddLeftSideConfig, GetAddContentConfig, GetAddRightSideConfig,
-    GetAddFooterConfig,
-    GetAddContainerConfig
-}
spaces/AkitoP/umamusume_bert_vits2/train_ms_acc.py
DELETED
@@ -1,623 +0,0 @@
|
|
1 |
-
# flake8: noqa: E402
|
2 |
-
|
3 |
-
import os
|
4 |
-
import torch
|
5 |
-
from torch.nn import functional as F
|
6 |
-
from torch.utils.data import DataLoader
|
7 |
-
from torch.utils.tensorboard import SummaryWriter
|
8 |
-
import torch.distributed as dist
|
9 |
-
from torch.nn.parallel import DistributedDataParallel as DDP
|
10 |
-
from torch.cuda.amp import autocast, GradScaler
|
11 |
-
from tqdm import tqdm
|
12 |
-
import logging
|
13 |
-
|
14 |
-
logging.getLogger("numba").setLevel(logging.WARNING)
|
15 |
-
import commons
|
16 |
-
import utils
|
17 |
-
from data_utils import (
|
18 |
-
TextAudioSpeakerLoader,
|
19 |
-
TextAudioSpeakerCollate,
|
20 |
-
DistributedBucketSampler,
|
21 |
-
)
|
22 |
-
from models import (
|
23 |
-
SynthesizerTrn,
|
24 |
-
MultiPeriodDiscriminator,
|
25 |
-
DurationDiscriminator,
|
26 |
-
)
|
27 |
-
from losses import generator_loss, discriminator_loss, feature_loss, kl_loss
|
28 |
-
from mel_processing import mel_spectrogram_torch, spec_to_mel_torch
|
29 |
-
from text.symbols import symbols
|
30 |
-
|
31 |
-
torch.backends.cuda.matmul.allow_tf32 = True
|
32 |
-
torch.backends.cudnn.allow_tf32 = (
|
33 |
-
True # If encontered training problem,please try to disable TF32.
|
34 |
-
)
|
35 |
-
torch.set_float32_matmul_precision("medium")
|
36 |
-
torch.backends.cudnn.benchmark = True
|
37 |
-
torch.backends.cuda.sdp_kernel("flash")
|
38 |
-
torch.backends.cuda.enable_flash_sdp(True)
|
39 |
-
torch.backends.cuda.enable_mem_efficient_sdp(
|
40 |
-
True
|
41 |
-
) # Not available if torch version is lower than 2.0
|
42 |
-
torch.backends.cuda.enable_math_sdp(True)
|
43 |
-
global_step = 0
|
44 |
-
|
45 |
-
|
46 |
-
def run():
|
47 |
-
dist.init_process_group(
|
48 |
-
backend="gloo",
|
49 |
-
init_method='tcp://127.0.0.1:11451', # Due to some training problem,we proposed to use gloo instead of nccl.
|
50 |
-
rank=0,
|
51 |
-
world_size=1,
|
52 |
-
) # Use torchrun instead of mp.spawn
|
53 |
-
rank = dist.get_rank()
|
54 |
-
n_gpus = dist.get_world_size()
|
55 |
-
hps = utils.get_hparams()
|
56 |
-
torch.manual_seed(hps.train.seed)
|
57 |
-
torch.cuda.set_device(rank)
|
58 |
-
global global_step
|
59 |
-
if rank == 0:
|
60 |
-
logger = utils.get_logger(hps.model_dir)
|
61 |
-
logger.info(hps)
|
62 |
-
utils.check_git_hash(hps.model_dir)
|
63 |
-
writer = SummaryWriter(log_dir=hps.model_dir)
|
64 |
-
writer_eval = SummaryWriter(log_dir=os.path.join(hps.model_dir, "eval"))
|
65 |
-
train_dataset = TextAudioSpeakerLoader(hps.data.training_files, hps.data)
|
66 |
-
train_sampler = DistributedBucketSampler(
|
67 |
-
train_dataset,
|
68 |
-
hps.train.batch_size,
|
69 |
-
[32, 300, 400, 500, 600, 700, 800, 900, 1000],
|
70 |
-
num_replicas=n_gpus,
|
71 |
-
rank=rank,
|
72 |
-
shuffle=True,
|
73 |
-
)
|
74 |
-
collate_fn = TextAudioSpeakerCollate()
|
75 |
-
train_loader = DataLoader(
|
76 |
-
train_dataset,
|
77 |
-
num_workers=16,
|
78 |
-
shuffle=False,
|
79 |
-
pin_memory=True,
|
80 |
-
collate_fn=collate_fn,
|
81 |
-
batch_sampler=train_sampler,
|
82 |
-
persistent_workers=True,
|
83 |
-
prefetch_factor=4,
|
84 |
-
) # DataLoader config could be adjusted.
|
85 |
-
if rank == 0:
|
86 |
-
eval_dataset = TextAudioSpeakerLoader(hps.data.validation_files, hps.data)
|
87 |
-
eval_loader = DataLoader(
|
88 |
-
eval_dataset,
|
89 |
-
num_workers=0,
|
90 |
-
shuffle=False,
|
91 |
-
batch_size=1,
|
92 |
-
pin_memory=True,
|
93 |
-
drop_last=False,
|
94 |
-
collate_fn=collate_fn,
|
95 |
-
)
|
96 |
-
if (
|
97 |
-
"use_noise_scaled_mas" in hps.model.keys()
|
98 |
-
and hps.model.use_noise_scaled_mas is True
|
99 |
-
):
|
100 |
-
print("Using noise scaled MAS for VITS2")
|
101 |
-
mas_noise_scale_initial = 0.01
|
102 |
-
noise_scale_delta = 2e-6
|
103 |
-
else:
|
104 |
-
print("Using normal MAS for VITS1")
|
105 |
-
mas_noise_scale_initial = 0.0
|
106 |
-
noise_scale_delta = 0.0
|
107 |
-
if (
|
108 |
-
"use_duration_discriminator" in hps.model.keys()
|
109 |
-
and hps.model.use_duration_discriminator is True
|
110 |
-
):
|
111 |
-
print("Using duration discriminator for VITS2")
|
112 |
-
net_dur_disc = DurationDiscriminator(
|
113 |
-
hps.model.hidden_channels,
|
114 |
-
hps.model.hidden_channels,
|
115 |
-
3,
|
116 |
-
0.1,
|
117 |
-
gin_channels=hps.model.gin_channels if hps.data.n_speakers != 0 else 0,
|
118 |
-
).cuda(rank)
|
119 |
-
if (
|
120 |
-
"use_spk_conditioned_encoder" in hps.model.keys()
|
121 |
-
and hps.model.use_spk_conditioned_encoder is True
|
122 |
-
):
|
123 |
-
if hps.data.n_speakers == 0:
|
124 |
-
raise ValueError(
|
125 |
-
"n_speakers must be > 0 when using spk conditioned encoder to train multi-speaker model"
|
126 |
-
)
|
127 |
-
else:
|
128 |
-
print("Using normal encoder for VITS1")
|
129 |
-
|
130 |
-
net_g = SynthesizerTrn(
|
131 |
-
len(symbols),
|
132 |
-
hps.data.filter_length // 2 + 1,
|
133 |
-
hps.train.segment_size // hps.data.hop_length,
|
134 |
-
n_speakers=hps.data.n_speakers,
|
135 |
-
mas_noise_scale_initial=mas_noise_scale_initial,
|
136 |
-
noise_scale_delta=noise_scale_delta,
|
137 |
-
**hps.model,
|
138 |
-
).cuda(rank)
|
139 |
-
|
140 |
-
net_d = MultiPeriodDiscriminator(hps.model.use_spectral_norm).cuda(rank)
|
141 |
-
optim_g = torch.optim.AdamW(
|
142 |
-
filter(lambda p: p.requires_grad, net_g.parameters()),
|
143 |
-
hps.train.learning_rate,
|
144 |
-
betas=hps.train.betas,
|
145 |
-
eps=hps.train.eps,
|
146 |
-
)
|
147 |
-
optim_d = torch.optim.AdamW(
|
148 |
-
net_d.parameters(),
|
149 |
-
hps.train.learning_rate,
|
150 |
-
betas=hps.train.betas,
|
151 |
-
eps=hps.train.eps,
|
152 |
-
)
|
153 |
-
if net_dur_disc is not None:
|
154 |
-
optim_dur_disc = torch.optim.AdamW(
|
155 |
-
net_dur_disc.parameters(),
|
156 |
-
hps.train.learning_rate,
|
157 |
-
betas=hps.train.betas,
|
158 |
-
eps=hps.train.eps,
|
159 |
-
)
|
160 |
-
else:
|
161 |
-
optim_dur_disc = None
|
162 |
-
net_g = DDP(net_g, device_ids=[rank], find_unused_parameters=True)
|
163 |
-
net_d = DDP(net_d, device_ids=[rank], find_unused_parameters=True)
|
164 |
-
if net_dur_disc is not None:
|
165 |
-
net_dur_disc = DDP(net_dur_disc, device_ids=[rank], find_unused_parameters=True)
|
166 |
-
try:
|
167 |
-
if net_dur_disc is not None:
|
168 |
-
_, _, dur_resume_lr, epoch_str = utils.load_checkpoint(
|
169 |
-
utils.latest_checkpoint_path(hps.model_dir, "DUR_*.pth"),
|
170 |
-
net_dur_disc,
|
171 |
-
optim_dur_disc,
|
172 |
-
skip_optimizer=hps.train.skip_optimizer
|
173 |
-
if "skip_optimizer" in hps.train
|
174 |
-
else True,
|
175 |
-
)
|
176 |
-
_, optim_g, g_resume_lr, epoch_str = utils.load_checkpoint(
|
177 |
-
utils.latest_checkpoint_path(hps.model_dir, "G_*.pth"),
|
178 |
-
net_g,
|
179 |
-
optim_g,
|
180 |
-
skip_optimizer=hps.train.skip_optimizer
|
181 |
-
if "skip_optimizer" in hps.train
|
182 |
-
else True,
|
183 |
-
)
|
184 |
-
_, optim_d, d_resume_lr, epoch_str = utils.load_checkpoint(
|
185 |
-
utils.latest_checkpoint_path(hps.model_dir, "D_*.pth"),
|
186 |
-
net_d,
|
187 |
-
optim_d,
|
188 |
-
skip_optimizer=hps.train.skip_optimizer
|
189 |
-
if "skip_optimizer" in hps.train
|
190 |
-
else True,
|
191 |
-
)
|
192 |
-
if not optim_g.param_groups[0].get("initial_lr"):
|
193 |
-
optim_g.param_groups[0]["initial_lr"] = g_resume_lr
|
194 |
-
if not optim_d.param_groups[0].get("initial_lr"):
|
195 |
-
optim_d.param_groups[0]["initial_lr"] = d_resume_lr
|
196 |
-
if not optim_dur_disc.param_groups[0].get("initial_lr"):
|
197 |
-
optim_dur_disc.param_groups[0]["initial_lr"] = dur_resume_lr
|
198 |
-
|
199 |
-
epoch_str = max(epoch_str, 1)
|
200 |
-
global_step = (epoch_str - 1) * len(train_loader)
|
201 |
-
except Exception as e:
|
202 |
-
print(e)
|
203 |
-
epoch_str = 1
|
204 |
-
global_step = 0
|
205 |
-
|
206 |
-
scheduler_g = torch.optim.lr_scheduler.ExponentialLR(
|
207 |
-
optim_g, gamma=hps.train.lr_decay, last_epoch=epoch_str - 2
|
208 |
-
)
|
209 |
-
scheduler_d = torch.optim.lr_scheduler.ExponentialLR(
|
210 |
-
optim_d, gamma=hps.train.lr_decay, last_epoch=epoch_str - 2
|
211 |
-
)
|
212 |
-
if net_dur_disc is not None:
|
213 |
-
if not optim_dur_disc.param_groups[0].get("initial_lr"):
|
214 |
-
optim_dur_disc.param_groups[0]["initial_lr"] = dur_resume_lr
|
215 |
-
scheduler_dur_disc = torch.optim.lr_scheduler.ExponentialLR(
|
216 |
-
optim_dur_disc, gamma=hps.train.lr_decay, last_epoch=epoch_str - 2
|
217 |
-
)
|
218 |
-
else:
|
219 |
-
scheduler_dur_disc = None
|
220 |
-
scaler = GradScaler(enabled=hps.train.fp16_run)
|
221 |
-
|
222 |
-
|
223 |
-
|
224 |
-
|
225 |
-
for epoch in range(epoch_str, hps.train.epochs + 1):
|
226 |
-
if rank == 0:
|
227 |
-
train_and_evaluate(
|
228 |
-
rank,
|
229 |
-
epoch,
|
230 |
-
hps,
|
231 |
-
[net_g, net_d, net_dur_disc],
|
232 |
-
[optim_g, optim_d, optim_dur_disc],
|
233 |
-
[scheduler_g, scheduler_d, scheduler_dur_disc],
|
234 |
-
scaler,
|
235 |
-
[train_loader, eval_loader],
|
236 |
-
logger,
|
237 |
-
[writer, writer_eval],
|
238 |
-
)
|
239 |
-
else:
|
240 |
-
train_and_evaluate(
|
241 |
-
rank,
|
242 |
-
epoch,
|
243 |
-
hps,
|
244 |
-
[net_g, net_d, net_dur_disc],
|
245 |
-
[optim_g, optim_d, optim_dur_disc],
|
246 |
-
[scheduler_g, scheduler_d, scheduler_dur_disc],
|
247 |
-
scaler,
|
248 |
-
[train_loader, None],
|
249 |
-
None,
|
250 |
-
None,
|
251 |
-
)
|
252 |
-
scheduler_g.step()
|
253 |
-
scheduler_d.step()
|
254 |
-
if net_dur_disc is not None:
|
255 |
-
scheduler_dur_disc.step()
|
256 |
-
|
257 |
-
|
258 |
-
__ACCUMULATION_STEP__ = 6
|
259 |
-
__CURRENT_ACCUMULATION_STEP__ = 0
|
260 |
-
|
261 |
-
def train_and_evaluate(
|
262 |
-
rank, epoch, hps, nets, optims, schedulers, scaler, loaders, logger, writers
|
263 |
-
):
|
264 |
-
global __ACCUMULATION_STEP__
|
265 |
-
global __CURRENT_ACCUMULATION_STEP__
|
266 |
-
net_g, net_d, net_dur_disc = nets
|
267 |
-
optim_g, optim_d, optim_dur_disc = optims
|
268 |
-
scheduler_g, scheduler_d, scheduler_dur_disc = schedulers
|
269 |
-
train_loader, eval_loader = loaders
|
270 |
-
if writers is not None:
|
271 |
-
writer, writer_eval = writers
|
272 |
-
|
273 |
-
train_loader.batch_sampler.set_epoch(epoch)
|
274 |
-
global global_step
|
275 |
-
|
276 |
-
net_g.train()
|
277 |
-
net_d.train()
|
278 |
-
if net_dur_disc is not None:
|
279 |
-
net_dur_disc.train()
|
280 |
-
for batch_idx, (
|
281 |
-
x,
|
282 |
-
x_lengths,
|
283 |
-
spec,
|
284 |
-
spec_lengths,
|
285 |
-
y,
|
286 |
-
y_lengths,
|
287 |
-
speakers,
|
288 |
-
tone,
|
289 |
-
language,
|
290 |
-
bert,
|
291 |
-
ja_bert,
|
292 |
-
) in tqdm(enumerate(train_loader)):
|
293 |
-
if net_g.module.use_noise_scaled_mas:
|
294 |
-
current_mas_noise_scale = (
|
295 |
-
net_g.module.mas_noise_scale_initial
|
296 |
-
- net_g.module.noise_scale_delta * global_step
|
297 |
-
)
|
298 |
-
net_g.module.current_mas_noise_scale = max(current_mas_noise_scale, 0.0)
|
299 |
-
x, x_lengths = x.cuda(rank, non_blocking=True), x_lengths.cuda(
|
300 |
-
rank, non_blocking=True
|
301 |
-
)
|
302 |
-
spec, spec_lengths = spec.cuda(rank, non_blocking=True), spec_lengths.cuda(
|
303 |
-
rank, non_blocking=True
|
304 |
-
)
|
305 |
-
y, y_lengths = y.cuda(rank, non_blocking=True), y_lengths.cuda(
|
306 |
-
rank, non_blocking=True
|
307 |
-
)
|
308 |
-
speakers = speakers.cuda(rank, non_blocking=True)
|
309 |
-
tone = tone.cuda(rank, non_blocking=True)
|
310 |
-
language = language.cuda(rank, non_blocking=True)
|
311 |
-
bert = bert.cuda(rank, non_blocking=True)
|
312 |
-
ja_bert = ja_bert.cuda(rank, non_blocking=True)
|
313 |
-
|
314 |
-
with autocast(enabled=hps.train.fp16_run):
|
315 |
-
(
|
316 |
-
y_hat,
|
317 |
-
l_length,
|
318 |
-
attn,
|
319 |
-
ids_slice,
|
320 |
-
x_mask,
|
321 |
-
z_mask,
|
322 |
-
(z, z_p, m_p, logs_p, m_q, logs_q),
|
323 |
-
(hidden_x, logw, logw_),
|
324 |
-
) = net_g(
|
325 |
-
x,
|
326 |
-
x_lengths,
|
327 |
-
spec,
|
328 |
-
spec_lengths,
|
329 |
-
speakers,
|
330 |
-
tone,
|
331 |
-
language,
|
332 |
-
bert,
|
333 |
-
ja_bert,
|
334 |
-
)
|
335 |
-
mel = spec_to_mel_torch(
|
336 |
-
spec,
|
337 |
-
hps.data.filter_length,
|
338 |
-
hps.data.n_mel_channels,
|
339 |
-
hps.data.sampling_rate,
|
340 |
-
hps.data.mel_fmin,
|
341 |
-
hps.data.mel_fmax,
|
342 |
-
)
|
343 |
-
y_mel = commons.slice_segments(
|
344 |
-
mel, ids_slice, hps.train.segment_size // hps.data.hop_length
|
345 |
-
)
|
346 |
-
y_hat_mel = mel_spectrogram_torch(
|
347 |
-
y_hat.squeeze(1),
|
348 |
-
hps.data.filter_length,
|
349 |
-
hps.data.n_mel_channels,
|
350 |
-
hps.data.sampling_rate,
|
351 |
-
hps.data.hop_length,
|
352 |
-
hps.data.win_length,
|
353 |
-
hps.data.mel_fmin,
|
354 |
-
hps.data.mel_fmax,
|
355 |
-
)
|
356 |
-
|
357 |
-
y = commons.slice_segments(
|
358 |
-
y, ids_slice * hps.data.hop_length, hps.train.segment_size
|
359 |
-
) # slice
|
360 |
-
|
361 |
-
# Discriminator
|
362 |
-
y_d_hat_r, y_d_hat_g, _, _ = net_d(y, y_hat.detach())
|
363 |
-
with autocast(enabled=False):
|
364 |
-
loss_disc, losses_disc_r, losses_disc_g = discriminator_loss(
|
365 |
-
y_d_hat_r, y_d_hat_g
|
366 |
-
)
|
367 |
-
loss_disc_all = loss_disc
|
368 |
-
if net_dur_disc is not None:
|
369 |
-
y_dur_hat_r, y_dur_hat_g = net_dur_disc(
|
370 |
-
hidden_x.detach(), x_mask.detach(), logw.detach(), logw_.detach()
|
371 |
-
)
|
372 |
-
with autocast(enabled=False):
|
373 |
-
# TODO: I think need to mean using the mask, but for now, just mean all
|
374 |
-
(
|
375 |
-
loss_dur_disc,
|
376 |
-
losses_dur_disc_r,
|
377 |
-
losses_dur_disc_g,
|
378 |
-
) = discriminator_loss(y_dur_hat_r, y_dur_hat_g)
|
379 |
-
loss_dur_disc_all = loss_dur_disc
|
380 |
-
optim_dur_disc.zero_grad()
|
381 |
-
scaler.scale(loss_dur_disc_all).backward()
|
382 |
-
scaler.unscale_(optim_dur_disc)
|
383 |
-
commons.clip_grad_value_(net_dur_disc.parameters(), None)
|
384 |
-
scaler.step(optim_dur_disc)
|
385 |
-
|
386 |
-
|
387 |
-
|
388 |
-
scaler.scale(loss_disc_all/__ACCUMULATION_STEP__).backward()
|
389 |
-
__CURRENT_ACCUMULATION_STEP__ += 1
|
390 |
-
|
391 |
-
if __CURRENT_ACCUMULATION_STEP__ == __ACCUMULATION_STEP__:
|
392 |
-
__CURRENT_ACCUMULATION_STEP__ = 0
|
393 |
-
|
394 |
-
scaler.unscale_(optim_d)
|
395 |
-
grad_norm_d = commons.clip_grad_value_(net_d.parameters(), None)
|
396 |
-
scaler.step(optim_d)
|
397 |
-
optim_d.zero_grad()
|
398 |
-
|
399 |
-
|
400 |
-
|
401 |
-
|
402 |
-
with autocast(enabled=hps.train.fp16_run):
|
403 |
-
# Generator
|
404 |
-
y_d_hat_r, y_d_hat_g, fmap_r, fmap_g = net_d(y, y_hat)
|
405 |
-
if net_dur_disc is not None:
|
406 |
-
y_dur_hat_r, y_dur_hat_g = net_dur_disc(hidden_x, x_mask, logw, logw_)
|
407 |
-
with autocast(enabled=False):
|
408 |
-
loss_dur = torch.sum(l_length.float())
|
409 |
-
loss_mel = F.l1_loss(y_mel, y_hat_mel) * hps.train.c_mel
|
410 |
-
loss_kl = kl_loss(z_p, logs_q, m_p, logs_p, z_mask) * hps.train.c_kl
|
411 |
-
|
412 |
-
loss_fm = feature_loss(fmap_r, fmap_g)
|
413 |
-
loss_gen, losses_gen = generator_loss(y_d_hat_g)
|
414 |
-
loss_gen_all = loss_gen + loss_fm + loss_mel + loss_dur + loss_kl
|
415 |
-
if net_dur_disc is not None:
|
416 |
-
loss_dur_gen, losses_dur_gen = generator_loss(y_dur_hat_g)
|
417 |
-
loss_gen_all += loss_dur_gen
|
418 |
-
|
419 |
-
|
420 |
-
scaler.scale(loss_gen_all/__ACCUMULATION_STEP__).backward()
|
421 |
-
if __CURRENT_ACCUMULATION_STEP__ == __ACCUMULATION_STEP__:
|
422 |
-
__CURRENT_ACCUMULATION_STEP__ = 0
|
423 |
-
|
424 |
-
scaler.unscale_(optim_g)
|
425 |
-
grad_norm_g = commons.clip_grad_value_(net_g.parameters(), None)
|
426 |
-
scaler.step(optim_g)
|
427 |
-
scaler.update()
|
428 |
-
optim_g.zero_grad()
|
429 |
-
|
430 |
-
|
431 |
-
|
432 |
-
|
433 |
-
if rank == 0:
|
434 |
-
if (global_step-1) % hps.train.log_interval == 0:
|
435 |
-
lr = optim_g.param_groups[0]["lr"]
|
436 |
-
losses = [loss_disc, loss_gen, loss_fm, loss_mel, loss_dur, loss_kl]
|
437 |
-
logger.info(
|
438 |
-
"Train Epoch: {} [{:.0f}%]".format(
|
439 |
-
epoch, 100.0 * batch_idx / len(train_loader)
|
440 |
-
)
|
441 |
-
)
|
442 |
-
logger.info([x.item() for x in losses] + [global_step, lr])
|
443 |
-
|
444 |
-
scalar_dict = {
|
445 |
-
"loss/g/total": loss_gen_all,
|
446 |
-
"loss/d/total": loss_disc_all,
|
447 |
-
"learning_rate": lr,
|
448 |
-
"grad_norm_d": grad_norm_d,
|
449 |
-
"grad_norm_g": grad_norm_g,
|
450 |
-
}
|
451 |
-
scalar_dict.update(
|
452 |
-
{
|
453 |
-
"loss/g/fm": loss_fm,
|
454 |
-
"loss/g/mel": loss_mel,
|
455 |
-
"loss/g/dur": loss_dur,
|
456 |
-
"loss/g/kl": loss_kl,
|
457 |
-
}
|
458 |
-
)
|
459 |
-
scalar_dict.update(
|
460 |
-
{"loss/g/{}".format(i): v for i, v in enumerate(losses_gen)}
|
461 |
-
)
|
462 |
-
scalar_dict.update(
|
463 |
-
{"loss/d_r/{}".format(i): v for i, v in enumerate(losses_disc_r)}
|
464 |
-
)
|
465 |
-
scalar_dict.update(
|
466 |
-
{"loss/d_g/{}".format(i): v for i, v in enumerate(losses_disc_g)}
|
467 |
-
)
|
468 |
-
|
469 |
-
image_dict = {
|
470 |
-
"slice/mel_org": utils.plot_spectrogram_to_numpy(
|
471 |
-
y_mel[0].data.cpu().numpy()
|
472 |
-
),
|
473 |
-
"slice/mel_gen": utils.plot_spectrogram_to_numpy(
|
474 |
-
y_hat_mel[0].data.cpu().numpy()
|
475 |
-
),
|
476 |
-
"all/mel": utils.plot_spectrogram_to_numpy(
|
477 |
-
mel[0].data.cpu().numpy()
|
478 |
-
),
|
479 |
-
"all/attn": utils.plot_alignment_to_numpy(
|
480 |
-
-                        attn[0, 0].data.cpu().numpy()
-                    ),
-                }
-                utils.summarize(
-                    writer=writer,
-                    global_step=global_step,
-                    images=image_dict,
-                    scalars=scalar_dict,
-                )
-
-            if (global_step-1) % hps.train.eval_interval == 0:
-                evaluate(hps, net_g, eval_loader, writer_eval)
-                utils.save_checkpoint(
-                    net_g,
-                    optim_g,
-                    hps.train.learning_rate,
-                    epoch,
-                    os.path.join(hps.model_dir, "G_{}.pth".format(global_step)),
-                )
-                utils.save_checkpoint(
-                    net_d,
-                    optim_d,
-                    hps.train.learning_rate,
-                    epoch,
-                    os.path.join(hps.model_dir, "D_{}.pth".format(global_step)),
-                )
-                if net_dur_disc is not None:
-                    utils.save_checkpoint(
-                        net_dur_disc,
-                        optim_dur_disc,
-                        hps.train.learning_rate,
-                        epoch,
-                        os.path.join(hps.model_dir, "DUR_{}.pth".format(global_step)),
-                    )
-                keep_ckpts = getattr(hps.train, "keep_ckpts", 5)
-                if keep_ckpts > 0:
-                    utils.clean_checkpoints(
-                        path_to_models=hps.model_dir,
-                        n_ckpts_to_keep=keep_ckpts,
-                        sort_by_time=True,
-                    )
-
-        global_step += 1
-
-    if rank == 0:
-        logger.info("====> Epoch: {} ===>{}".format(epoch, __CURRENT_ACCUMULATION_STEP__))
-
-
-
-def evaluate(hps, generator, eval_loader, writer_eval):
-    generator.eval()
-    image_dict = {}
-    audio_dict = {}
-    print("Evaluating ...")
-    with torch.no_grad():
-        for batch_idx, (
-            x,
-            x_lengths,
-            spec,
-            spec_lengths,
-            y,
-            y_lengths,
-            speakers,
-            tone,
-            language,
-            bert,
-            ja_bert,
-        ) in enumerate(eval_loader):
-            x, x_lengths = x.cuda(), x_lengths.cuda()
-            spec, spec_lengths = spec.cuda(), spec_lengths.cuda()
-            y, y_lengths = y.cuda(), y_lengths.cuda()
-            speakers = speakers.cuda()
-            bert = bert.cuda()
-            ja_bert = ja_bert.cuda()
-            tone = tone.cuda()
-            language = language.cuda()
-            for use_sdp in [True, False]:
-                y_hat, attn, mask, *_ = generator.module.infer(
-                    x,
-                    x_lengths,
-                    speakers,
-                    tone,
-                    language,
-                    bert,
-                    ja_bert,
-                    y=spec,
-                    max_len=1000,
-                    sdp_ratio=0.0 if not use_sdp else 1.0,
-                )
-                y_hat_lengths = mask.sum([1, 2]).long() * hps.data.hop_length
-
-                mel = spec_to_mel_torch(
-                    spec,
-                    hps.data.filter_length,
-                    hps.data.n_mel_channels,
-                    hps.data.sampling_rate,
-                    hps.data.mel_fmin,
-                    hps.data.mel_fmax,
-                )
-                y_hat_mel = mel_spectrogram_torch(
-                    y_hat.squeeze(1).float(),
-                    hps.data.filter_length,
-                    hps.data.n_mel_channels,
-                    hps.data.sampling_rate,
-                    hps.data.hop_length,
-                    hps.data.win_length,
-                    hps.data.mel_fmin,
-                    hps.data.mel_fmax,
-                )
-                image_dict.update(
-                    {
-                        f"gen/mel_{batch_idx}": utils.plot_spectrogram_to_numpy(
-                            y_hat_mel[0].cpu().numpy()
-                        )
-                    }
-                )
-                audio_dict.update(
-                    {
-                        f"gen/audio_{batch_idx}_{use_sdp}": y_hat[
-                            0, :, : y_hat_lengths[0]
-                        ]
-                    }
-                )
-                image_dict.update(
-                    {
-                        f"gt/mel_{batch_idx}": utils.plot_spectrogram_to_numpy(
-                            mel[0].cpu().numpy()
-                        )
-                    }
-                )
-                audio_dict.update({f"gt/audio_{batch_idx}": y[0, :, : y_lengths[0]]})
-
-    utils.summarize(
-        writer=writer_eval,
-        global_step=global_step,
-        images=image_dict,
-        audios=audio_dict,
-        audio_sampling_rate=hps.data.sampling_rate,
-    )
-    generator.train()
-
-
-if __name__ == "__main__":
-    run()
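Note on the checkpoint-retention call above: `utils.clean_checkpoints(path_to_models=..., n_ckpts_to_keep=keep_ckpts, sort_by_time=True)` is repo-specific, so only the deleted training script defines its exact behavior. The sketch below merely illustrates the keep-N-newest idea it appears to implement; the helper name, glob pattern, and print-only behavior are assumptions, not the repo's actual code.

import glob
import os

def keep_newest_checkpoints(model_dir, n_keep=5, pattern="G_*.pth"):
    # Sort matching checkpoints by modification time, newest first.
    ckpts = sorted(glob.glob(os.path.join(model_dir, pattern)),
                   key=os.path.getmtime, reverse=True)
    # Everything beyond the n_keep newest is a candidate for deletion.
    for stale in ckpts[n_keep:]:
        print("would remove:", stale)  # swap in os.remove(stale) once the selection looks right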
spaces/AlekseyKorshuk/instagram-filter-removal/modeling/build.py
DELETED
@@ -1,19 +0,0 @@
-from modeling.ifrnet import IFRNet, Discriminator, PatchDiscriminator, MLP
-from modeling.benchmark import UNet
-
-
-def build_model(args):
-    if args.MODEL.NAME.lower() == "ifrnet":
-        net = IFRNet(base_n_channels=args.MODEL.IFR.NUM_CHANNELS, destyler_n_channels=args.MODEL.IFR.DESTYLER_CHANNELS)
-        mlp = MLP(base_n_channels=args.MODEL.IFR.NUM_CHANNELS, num_class=args.MODEL.NUM_CLASS)
-    elif args.MODEL.NAME.lower() == "ifr-no-aux":
-        net = IFRNet(base_n_channels=args.MODEL.IFR.NUM_CHANNELS, destyler_n_channels=args.MODEL.IFR.DESTYLER_CHANNELS)
-        mlp = None
-    else:
-        raise NotImplementedError
-    return net, mlp
-
-
-def build_discriminators(args):
-    return Discriminator(base_n_channels=args.MODEL.D.NUM_CHANNELS), PatchDiscriminator(base_n_channels=args.MODEL.D.NUM_CHANNELS)
-
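For context, `build_model` above reads a yacs-style nested config. A minimal invocation sketch, assuming the space's `modeling` package is importable; the channel and class counts are placeholders, only the attribute names come from the deleted file:

from types import SimpleNamespace

from modeling.build import build_model, build_discriminators  # assumes the repo root is on sys.path

# Stand-in for the nested args.MODEL.* config the factory reads (values are illustrative only).
cfg = SimpleNamespace(
    MODEL=SimpleNamespace(
        NAME="ifrnet",
        NUM_CLASS=16,
        IFR=SimpleNamespace(NUM_CHANNELS=32, DESTYLER_CHANNELS=32),
        D=SimpleNamespace(NUM_CHANNELS=32),
    )
)

net, mlp = build_model(cfg)
disc, patch_disc = build_discriminators(cfg)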
spaces/AlexWang/lama/saicinpainting/training/data/datasets.py
DELETED
@@ -1,304 +0,0 @@
-import glob
-import logging
-import os
-import random
-
-import albumentations as A
-import cv2
-import numpy as np
-import torch
-import torch.nn.functional as F
-import webdataset
-from omegaconf import open_dict, OmegaConf
-from skimage.feature import canny
-from skimage.transform import rescale, resize
-from torch.utils.data import Dataset, IterableDataset, DataLoader, DistributedSampler, ConcatDataset
-
-from saicinpainting.evaluation.data import InpaintingDataset as InpaintingEvaluationDataset, \
-    OurInpaintingDataset as OurInpaintingEvaluationDataset, ceil_modulo, InpaintingEvalOnlineDataset
-from saicinpainting.training.data.aug import IAAAffine2, IAAPerspective2
-from saicinpainting.training.data.masks import get_mask_generator
-
-LOGGER = logging.getLogger(__name__)
-
-
-class InpaintingTrainDataset(Dataset):
-    def __init__(self, indir, mask_generator, transform):
-        self.in_files = list(glob.glob(os.path.join(indir, '**', '*.jpg'), recursive=True))
-        self.mask_generator = mask_generator
-        self.transform = transform
-        self.iter_i = 0
-
-    def __len__(self):
-        return len(self.in_files)
-
-    def __getitem__(self, item):
-        path = self.in_files[item]
-        img = cv2.imread(path)
-        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
-        img = self.transform(image=img)['image']
-        img = np.transpose(img, (2, 0, 1))
-        # TODO: maybe generate mask before augmentations? slower, but better for segmentation-based masks
-        mask = self.mask_generator(img, iter_i=self.iter_i)
-        self.iter_i += 1
-        return dict(image=img,
-                    mask=mask)
-
-
-class InpaintingTrainWebDataset(IterableDataset):
-    def __init__(self, indir, mask_generator, transform, shuffle_buffer=200):
-        self.impl = webdataset.Dataset(indir).shuffle(shuffle_buffer).decode('rgb').to_tuple('jpg')
-        self.mask_generator = mask_generator
-        self.transform = transform
-
-    def __iter__(self):
-        for iter_i, (img,) in enumerate(self.impl):
-            img = np.clip(img * 255, 0, 255).astype('uint8')
-            img = self.transform(image=img)['image']
-            img = np.transpose(img, (2, 0, 1))
-            mask = self.mask_generator(img, iter_i=iter_i)
-            yield dict(image=img,
-                       mask=mask)
-
-
-class ImgSegmentationDataset(Dataset):
-    def __init__(self, indir, mask_generator, transform, out_size, segm_indir, semantic_seg_n_classes):
-        self.indir = indir
-        self.segm_indir = segm_indir
-        self.mask_generator = mask_generator
-        self.transform = transform
-        self.out_size = out_size
-        self.semantic_seg_n_classes = semantic_seg_n_classes
-        self.in_files = list(glob.glob(os.path.join(indir, '**', '*.jpg'), recursive=True))
-
-    def __len__(self):
-        return len(self.in_files)
-
-    def __getitem__(self, item):
-        path = self.in_files[item]
-        img = cv2.imread(path)
-        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
-        img = cv2.resize(img, (self.out_size, self.out_size))
-        img = self.transform(image=img)['image']
-        img = np.transpose(img, (2, 0, 1))
-        mask = self.mask_generator(img)
-        segm, segm_classes= self.load_semantic_segm(path)
-        result = dict(image=img,
-                      mask=mask,
-                      segm=segm,
-                      segm_classes=segm_classes)
-        return result
-
-    def load_semantic_segm(self, img_path):
-        segm_path = img_path.replace(self.indir, self.segm_indir).replace(".jpg", ".png")
-        mask = cv2.imread(segm_path, cv2.IMREAD_GRAYSCALE)
-        mask = cv2.resize(mask, (self.out_size, self.out_size))
-        tensor = torch.from_numpy(np.clip(mask.astype(int)-1, 0, None))
-        ohe = F.one_hot(tensor.long(), num_classes=self.semantic_seg_n_classes)  # w x h x n_classes
-        return ohe.permute(2, 0, 1).float(), tensor.unsqueeze(0)
-
-
-def get_transforms(transform_variant, out_size):
-    if transform_variant == 'default':
-        transform = A.Compose([
-            A.RandomScale(scale_limit=0.2),  # +/- 20%
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'distortions':
-        transform = A.Compose([
-            IAAPerspective2(scale=(0.0, 0.06)),
-            IAAAffine2(scale=(0.7, 1.3),
-                       rotate=(-40, 40),
-                       shear=(-0.1, 0.1)),
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.OpticalDistortion(),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'distortions_scale05_1':
-        transform = A.Compose([
-            IAAPerspective2(scale=(0.0, 0.06)),
-            IAAAffine2(scale=(0.5, 1.0),
-                       rotate=(-40, 40),
-                       shear=(-0.1, 0.1),
-                       p=1),
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.OpticalDistortion(),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'distortions_scale03_12':
-        transform = A.Compose([
-            IAAPerspective2(scale=(0.0, 0.06)),
-            IAAAffine2(scale=(0.3, 1.2),
-                       rotate=(-40, 40),
-                       shear=(-0.1, 0.1),
-                       p=1),
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.OpticalDistortion(),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'distortions_scale03_07':
-        transform = A.Compose([
-            IAAPerspective2(scale=(0.0, 0.06)),
-            IAAAffine2(scale=(0.3, 0.7),  # scale 512 to 256 in average
-                       rotate=(-40, 40),
-                       shear=(-0.1, 0.1),
-                       p=1),
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.OpticalDistortion(),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'distortions_light':
-        transform = A.Compose([
-            IAAPerspective2(scale=(0.0, 0.02)),
-            IAAAffine2(scale=(0.8, 1.8),
-                       rotate=(-20, 20),
-                       shear=(-0.03, 0.03)),
-            A.PadIfNeeded(min_height=out_size, min_width=out_size),
-            A.RandomCrop(height=out_size, width=out_size),
-            A.HorizontalFlip(),
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'non_space_transform':
-        transform = A.Compose([
-            A.CLAHE(),
-            A.RandomBrightnessContrast(brightness_limit=0.2, contrast_limit=0.2),
-            A.HueSaturationValue(hue_shift_limit=5, sat_shift_limit=30, val_shift_limit=5),
-            A.ToFloat()
-        ])
-    elif transform_variant == 'no_augs':
-        transform = A.Compose([
-            A.ToFloat()
-        ])
-    else:
-        raise ValueError(f'Unexpected transform_variant {transform_variant}')
-    return transform
-
-
-def make_default_train_dataloader(indir, kind='default', out_size=512, mask_gen_kwargs=None, transform_variant='default',
-                                  mask_generator_kind="mixed", dataloader_kwargs=None, ddp_kwargs=None, **kwargs):
-    LOGGER.info(f'Make train dataloader {kind} from {indir}. Using mask generator={mask_generator_kind}')
-
-    mask_generator = get_mask_generator(kind=mask_generator_kind, kwargs=mask_gen_kwargs)
-    transform = get_transforms(transform_variant, out_size)
-
-    if kind == 'default':
-        dataset = InpaintingTrainDataset(indir=indir,
-                                         mask_generator=mask_generator,
-                                         transform=transform,
-                                         **kwargs)
-    elif kind == 'default_web':
-        dataset = InpaintingTrainWebDataset(indir=indir,
-                                            mask_generator=mask_generator,
-                                            transform=transform,
-                                            **kwargs)
-    elif kind == 'img_with_segm':
-        dataset = ImgSegmentationDataset(indir=indir,
-                                         mask_generator=mask_generator,
-                                         transform=transform,
-                                         out_size=out_size,
-                                         **kwargs)
-    else:
-        raise ValueError(f'Unknown train dataset kind {kind}')
-
-    if dataloader_kwargs is None:
-        dataloader_kwargs = {}
-
-    is_dataset_only_iterable = kind in ('default_web',)
-
-    if ddp_kwargs is not None and not is_dataset_only_iterable:
-        dataloader_kwargs['shuffle'] = False
-        dataloader_kwargs['sampler'] = DistributedSampler(dataset, **ddp_kwargs)
-
-    if is_dataset_only_iterable and 'shuffle' in dataloader_kwargs:
-        with open_dict(dataloader_kwargs):
-            del dataloader_kwargs['shuffle']
-
-    dataloader = DataLoader(dataset, **dataloader_kwargs)
-    return dataloader
-
-
-def make_default_val_dataset(indir, kind='default', out_size=512, transform_variant='default', **kwargs):
-    if OmegaConf.is_list(indir) or isinstance(indir, (tuple, list)):
-        return ConcatDataset([
-            make_default_val_dataset(idir, kind=kind, out_size=out_size, transform_variant=transform_variant, **kwargs) for idir in indir
-        ])
-
-    LOGGER.info(f'Make val dataloader {kind} from {indir}')
-    mask_generator = get_mask_generator(kind=kwargs.get("mask_generator_kind"), kwargs=kwargs.get("mask_gen_kwargs"))
-
-    if transform_variant is not None:
-        transform = get_transforms(transform_variant, out_size)
-
-    if kind == 'default':
-        dataset = InpaintingEvaluationDataset(indir, **kwargs)
-    elif kind == 'our_eval':
-        dataset = OurInpaintingEvaluationDataset(indir, **kwargs)
-    elif kind == 'img_with_segm':
-        dataset = ImgSegmentationDataset(indir=indir,
-                                         mask_generator=mask_generator,
-                                         transform=transform,
-                                         out_size=out_size,
-                                         **kwargs)
-    elif kind == 'online':
-        dataset = InpaintingEvalOnlineDataset(indir=indir,
-                                              mask_generator=mask_generator,
-                                              transform=transform,
-                                              out_size=out_size,
-                                              **kwargs)
-    else:
-        raise ValueError(f'Unknown val dataset kind {kind}')
-
-    return dataset
-
-
-def make_default_val_dataloader(*args, dataloader_kwargs=None, **kwargs):
-    dataset = make_default_val_dataset(*args, **kwargs)
-
-    if dataloader_kwargs is None:
-        dataloader_kwargs = {}
-    dataloader = DataLoader(dataset, **dataloader_kwargs)
-    return dataloader
-
-
-def make_constant_area_crop_params(img_height, img_width, min_size=128, max_size=512, area=256*256, round_to_mod=16):
-    min_size = min(img_height, img_width, min_size)
-    max_size = min(img_height, img_width, max_size)
-    if random.random() < 0.5:
-        out_height = min(max_size, ceil_modulo(random.randint(min_size, max_size), round_to_mod))
-        out_width = min(max_size, ceil_modulo(area // out_height, round_to_mod))
-    else:
-        out_width = min(max_size, ceil_modulo(random.randint(min_size, max_size), round_to_mod))
-        out_height = min(max_size, ceil_modulo(area // out_width, round_to_mod))
-
-    start_y = random.randint(0, img_height - out_height)
-    start_x = random.randint(0, img_width - out_width)
-    return (start_y, start_x, out_height, out_width)
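A rough usage sketch for the dataloader factory above, assuming the LaMa repo and albumentations are installed and importable; the data directory and dataloader settings are placeholders, not values taken from the repo's configs:

from saicinpainting.training.data.datasets import make_default_train_dataloader  # requires the LaMa repo on sys.path

loader = make_default_train_dataloader(
    indir="/data/places/train",          # placeholder directory containing **/*.jpg images
    kind="default",
    out_size=256,
    transform_variant="distortions",
    mask_generator_kind="mixed",
    mask_gen_kwargs=None,
    dataloader_kwargs=dict(batch_size=8, shuffle=True, num_workers=4),
)
batch = next(iter(loader))               # dict with batch["image"] and batch["mask"]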
spaces/Ameaou/academic-chatgpt3.1/crazy_functions/crazy_functions_test.py
DELETED
@@ -1,92 +0,0 @@
-"""
-这是什么?
-这个文件用于函数插件的单元测试
-运行方法 python crazy_functions/crazy_functions_test.py
-"""
-
-def validate_path():
-    import os, sys
-    dir_name = os.path.dirname(__file__)
-    root_dir_assume = os.path.abspath(os.path.dirname(__file__) + '/..')
-    os.chdir(root_dir_assume)
-    sys.path.append(root_dir_assume)
-
-validate_path() # validate path so you can run from base directory
-
-from toolbox import get_conf, ChatBotWithCookies
-proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT, LAYOUT, API_KEY = \
-    get_conf('proxies', 'WEB_PORT', 'LLM_MODEL', 'CONCURRENT_COUNT', 'AUTHENTICATION', 'CHATBOT_HEIGHT', 'LAYOUT', 'API_KEY')
-
-llm_kwargs = {
-    'api_key': API_KEY,
-    'llm_model': LLM_MODEL,
-    'top_p':1.0,
-    'max_length': None,
-    'temperature':1.0,
-}
-plugin_kwargs = { }
-chatbot = ChatBotWithCookies(llm_kwargs)
-history = []
-system_prompt = "Serve me as a writing and programming assistant."
-web_port = 1024
-
-
-def test_解析一个Python项目():
-    from crazy_functions.解析项目源代码 import 解析一个Python项目
-    txt = "crazy_functions/test_project/python/dqn"
-    for cookies, cb, hist, msg in 解析一个Python项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_解析一个Cpp项目():
-    from crazy_functions.解析项目源代码 import 解析一个C项目
-    txt = "crazy_functions/test_project/cpp/cppipc"
-    for cookies, cb, hist, msg in 解析一个C项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_Latex英文润色():
-    from crazy_functions.Latex全文润色 import Latex英文润色
-    txt = "crazy_functions/test_project/latex/attention"
-    for cookies, cb, hist, msg in Latex英文润色(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_Markdown中译英():
-    from crazy_functions.批量Markdown翻译 import Markdown中译英
-    txt = "README.md"
-    for cookies, cb, hist, msg in Markdown中译英(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_批量翻译PDF文档():
-    from crazy_functions.批量翻译PDF文档_多线程 import 批量翻译PDF文档
-    txt = "crazy_functions/test_project/pdf_and_word"
-    for cookies, cb, hist, msg in 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_谷歌检索小助手():
-    from crazy_functions.谷歌检索小助手 import 谷歌检索小助手
-    txt = "https://scholar.google.com/scholar?hl=en&as_sdt=0%2C5&q=auto+reinforcement+learning&btnG="
-    for cookies, cb, hist, msg in 谷歌检索小助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_总结word文档():
-    from crazy_functions.总结word文档 import 总结word文档
-    txt = "crazy_functions/test_project/pdf_and_word"
-    for cookies, cb, hist, msg in 总结word文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-def test_下载arxiv论文并翻译摘要():
-    from crazy_functions.下载arxiv论文翻译摘要 import 下载arxiv论文并翻译摘要
-    txt = "1812.10695"
-    for cookies, cb, hist, msg in 下载arxiv论文并翻译摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
-        print(cb)
-
-test_解析一个Python项目()
-test_Latex英文润色()
-test_Markdown中译英()
-test_批量翻译PDF文档()
-test_谷歌检索小助手()
-test_总结word文档()
-test_下载arxiv论文并翻译摘要()
-test_解析一个Cpp项目()
-
-input("程序完成,回车退出。")
-print("退出。")
spaces/Amrrs/DragGan-Inversion/PTI/models/e4e/latent_codes_pool.py
DELETED
@@ -1,55 +0,0 @@
-import random
-import torch
-
-
-class LatentCodesPool:
-    """This class implements latent codes buffer that stores previously generated w latent codes.
-    This buffer enables us to update discriminators using a history of generated w's
-    rather than the ones produced by the latest encoder.
-    """
-
-    def __init__(self, pool_size):
-        """Initialize the ImagePool class
-        Parameters:
-            pool_size (int) -- the size of image buffer, if pool_size=0, no buffer will be created
-        """
-        self.pool_size = pool_size
-        if self.pool_size > 0:  # create an empty pool
-            self.num_ws = 0
-            self.ws = []
-
-    def query(self, ws):
-        """Return w's from the pool.
-        Parameters:
-            ws: the latest generated w's from the generator
-        Returns w's from the buffer.
-        By 50/100, the buffer will return input w's.
-        By 50/100, the buffer will return w's previously stored in the buffer,
-        and insert the current w's to the buffer.
-        """
-        if self.pool_size == 0:  # if the buffer size is 0, do nothing
-            return ws
-        return_ws = []
-        for w in ws:  # ws.shape: (batch, 512) or (batch, n_latent, 512)
-            # w = torch.unsqueeze(image.data, 0)
-            if w.ndim == 2:
-                i = random.randint(0, len(w) - 1)  # apply a random latent index as a candidate
-                w = w[i]
-            self.handle_w(w, return_ws)
-        return_ws = torch.stack(return_ws, 0)  # collect all the images and return
-        return return_ws
-
-    def handle_w(self, w, return_ws):
-        if self.num_ws < self.pool_size:  # if the buffer is not full; keep inserting current codes to the buffer
-            self.num_ws = self.num_ws + 1
-            self.ws.append(w)
-            return_ws.append(w)
-        else:
-            p = random.uniform(0, 1)
-            if p > 0.5:  # by 50% chance, the buffer will return a previously stored latent code, and insert the current code into the buffer
-                random_id = random.randint(0, self.pool_size - 1)  # randint is inclusive
-                tmp = self.ws[random_id].clone()
-                self.ws[random_id] = w
-                return_ws.append(tmp)
-            else:  # by another 50% chance, the buffer will return the current image
-                return_ws.append(w)
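A small usage sketch of the buffer above, assuming the `LatentCodesPool` class definition is importable (for example pasted into the same module); the shapes follow the comment in `query`, and the pool size is a placeholder:

import torch

pool = LatentCodesPool(pool_size=50)   # class defined in the diff above

fake_w = torch.randn(4, 18, 512)       # (batch, n_latent, 512) codes from the encoder
mixed_w = pool.query(fake_w)           # roughly half fresh codes, half replayed from the buffer
assert mixed_w.shape[0] == 4           # one returned code per input sample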
spaces/Amrrs/DragGan-Inversion/stylegan_human/torch_utils/ops/bias_act.h
DELETED
@@ -1,40 +0,0 @@
-// Copyright (c) SenseTime Research. All rights reserved.
-
-// Copyright (c) 2021, NVIDIA CORPORATION. All rights reserved.
-//
-// NVIDIA CORPORATION and its licensors retain all intellectual property
-// and proprietary rights in and to this software, related documentation
-// and any modifications thereto. Any use, reproduction, disclosure or
-// distribution of this software and related documentation without an express
-// license agreement from NVIDIA CORPORATION is strictly prohibited.
-
-//------------------------------------------------------------------------
-// CUDA kernel parameters.
-
-struct bias_act_kernel_params
-{
-    const void* x;      // [sizeX]
-    const void* b;      // [sizeB] or NULL
-    const void* xref;   // [sizeX] or NULL
-    const void* yref;   // [sizeX] or NULL
-    const void* dy;     // [sizeX] or NULL
-    void*       y;      // [sizeX]
-
-    int         grad;
-    int         act;
-    float       alpha;
-    float       gain;
-    float       clamp;
-
-    int         sizeX;
-    int         sizeB;
-    int         stepB;
-    int         loopX;
-};
-
-//------------------------------------------------------------------------
-// CUDA kernel selection.
-
-template <class T> void* choose_bias_act_kernel(const bias_act_kernel_params& p);
-
-//------------------------------------------------------------------------
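The header above only declares the parameter block for the fused CUDA kernel. Functionally, the op applies a bias, an activation, a gain, and an optional clamp in one pointwise pass. A plain-PyTorch sketch of that math for the leaky-ReLU case (a reference re-statement, not the actual kernel; default values are assumptions):

import torch
import torch.nn.functional as F

def bias_act_reference(x, b=None, alpha=0.2, gain=2 ** 0.5, clamp=None):
    # x: (N, C, ...); b: per-channel bias broadcast over the trailing dims.
    if b is not None:
        x = x + b.reshape(1, -1, *([1] * (x.ndim - 2)))
    y = F.leaky_relu(x, negative_slope=alpha) * gain
    if clamp is not None:
        y = y.clamp(-clamp, clamp)   # mirrors the float clamp field in the struct
    return y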
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/pipelines/deepfloyd_if/pipeline_if_superresolution.py
DELETED
@@ -1,914 +0,0 @@
|
|
1 |
-
import html
|
2 |
-
import inspect
|
3 |
-
import re
|
4 |
-
import urllib.parse as ul
|
5 |
-
from typing import Any, Callable, Dict, List, Optional, Union
|
6 |
-
|
7 |
-
import numpy as np
|
8 |
-
import PIL
|
9 |
-
import torch
|
10 |
-
import torch.nn.functional as F
|
11 |
-
from transformers import CLIPImageProcessor, T5EncoderModel, T5Tokenizer
|
12 |
-
|
13 |
-
from ...loaders import LoraLoaderMixin
|
14 |
-
from ...models import UNet2DConditionModel
|
15 |
-
from ...schedulers import DDPMScheduler
|
16 |
-
from ...utils import (
|
17 |
-
BACKENDS_MAPPING,
|
18 |
-
is_accelerate_available,
|
19 |
-
is_accelerate_version,
|
20 |
-
is_bs4_available,
|
21 |
-
is_ftfy_available,
|
22 |
-
logging,
|
23 |
-
randn_tensor,
|
24 |
-
replace_example_docstring,
|
25 |
-
)
|
26 |
-
from ..pipeline_utils import DiffusionPipeline
|
27 |
-
from . import IFPipelineOutput
|
28 |
-
from .safety_checker import IFSafetyChecker
|
29 |
-
from .watermark import IFWatermarker
|
30 |
-
|
31 |
-
|
32 |
-
if is_bs4_available():
|
33 |
-
from bs4 import BeautifulSoup
|
34 |
-
|
35 |
-
if is_ftfy_available():
|
36 |
-
import ftfy
|
37 |
-
|
38 |
-
|
39 |
-
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
|
40 |
-
|
41 |
-
|
42 |
-
EXAMPLE_DOC_STRING = """
|
43 |
-
Examples:
|
44 |
-
```py
|
45 |
-
>>> from diffusers import IFPipeline, IFSuperResolutionPipeline, DiffusionPipeline
|
46 |
-
>>> from diffusers.utils import pt_to_pil
|
47 |
-
>>> import torch
|
48 |
-
|
49 |
-
>>> pipe = IFPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
|
50 |
-
>>> pipe.enable_model_cpu_offload()
|
51 |
-
|
52 |
-
>>> prompt = 'a photo of a kangaroo wearing an orange hoodie and blue sunglasses standing in front of the eiffel tower holding a sign that says "very deep learning"'
|
53 |
-
>>> prompt_embeds, negative_embeds = pipe.encode_prompt(prompt)
|
54 |
-
|
55 |
-
>>> image = pipe(prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds, output_type="pt").images
|
56 |
-
|
57 |
-
>>> # save intermediate image
|
58 |
-
>>> pil_image = pt_to_pil(image)
|
59 |
-
>>> pil_image[0].save("./if_stage_I.png")
|
60 |
-
|
61 |
-
>>> super_res_1_pipe = IFSuperResolutionPipeline.from_pretrained(
|
62 |
-
... "DeepFloyd/IF-II-L-v1.0", text_encoder=None, variant="fp16", torch_dtype=torch.float16
|
63 |
-
... )
|
64 |
-
>>> super_res_1_pipe.enable_model_cpu_offload()
|
65 |
-
|
66 |
-
>>> image = super_res_1_pipe(
|
67 |
-
... image=image, prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds
|
68 |
-
... ).images
|
69 |
-
>>> image[0].save("./if_stage_II.png")
|
70 |
-
```
|
71 |
-
"""
|
72 |
-
|
73 |
-
|
74 |
-
class IFSuperResolutionPipeline(DiffusionPipeline, LoraLoaderMixin):
|
75 |
-
tokenizer: T5Tokenizer
|
76 |
-
text_encoder: T5EncoderModel
|
77 |
-
|
78 |
-
unet: UNet2DConditionModel
|
79 |
-
scheduler: DDPMScheduler
|
80 |
-
image_noising_scheduler: DDPMScheduler
|
81 |
-
|
82 |
-
feature_extractor: Optional[CLIPImageProcessor]
|
83 |
-
safety_checker: Optional[IFSafetyChecker]
|
84 |
-
|
85 |
-
watermarker: Optional[IFWatermarker]
|
86 |
-
|
87 |
-
bad_punct_regex = re.compile(
|
88 |
-
r"[" + "#®•©™&@·º½¾¿¡§~" + "\)" + "\(" + "\]" + "\[" + "\}" + "\{" + "\|" + "\\" + "\/" + "\*" + r"]{1,}"
|
89 |
-
) # noqa
|
90 |
-
|
91 |
-
_optional_components = ["tokenizer", "text_encoder", "safety_checker", "feature_extractor", "watermarker"]
|
92 |
-
|
93 |
-
def __init__(
|
94 |
-
self,
|
95 |
-
tokenizer: T5Tokenizer,
|
96 |
-
text_encoder: T5EncoderModel,
|
97 |
-
unet: UNet2DConditionModel,
|
98 |
-
scheduler: DDPMScheduler,
|
99 |
-
image_noising_scheduler: DDPMScheduler,
|
100 |
-
safety_checker: Optional[IFSafetyChecker],
|
101 |
-
feature_extractor: Optional[CLIPImageProcessor],
|
102 |
-
watermarker: Optional[IFWatermarker],
|
103 |
-
requires_safety_checker: bool = True,
|
104 |
-
):
|
105 |
-
super().__init__()
|
106 |
-
|
107 |
-
if safety_checker is None and requires_safety_checker:
|
108 |
-
logger.warning(
|
109 |
-
f"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure"
|
110 |
-
" that you abide to the conditions of the IF license and do not expose unfiltered"
|
111 |
-
" results in services or applications open to the public. Both the diffusers team and Hugging Face"
|
112 |
-
" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling"
|
113 |
-
" it only for use-cases that involve analyzing network behavior or auditing its results. For more"
|
114 |
-
" information, please have a look at https://github.com/huggingface/diffusers/pull/254 ."
|
115 |
-
)
|
116 |
-
|
117 |
-
if safety_checker is not None and feature_extractor is None:
|
118 |
-
raise ValueError(
|
119 |
-
"Make sure to define a feature extractor when loading {self.__class__} if you want to use the safety"
|
120 |
-
" checker. If you do not want to use the safety checker, you can pass `'safety_checker=None'` instead."
|
121 |
-
)
|
122 |
-
|
123 |
-
if unet.config.in_channels != 6:
|
124 |
-
logger.warn(
|
125 |
-
"It seems like you have loaded a checkpoint that shall not be used for super resolution from {unet.config._name_or_path} as it accepts {unet.config.in_channels} input channels instead of 6. Please make sure to pass a super resolution checkpoint as the `'unet'`: IFSuperResolutionPipeline.from_pretrained(unet=super_resolution_unet, ...)`."
|
126 |
-
)
|
127 |
-
|
128 |
-
self.register_modules(
|
129 |
-
tokenizer=tokenizer,
|
130 |
-
text_encoder=text_encoder,
|
131 |
-
unet=unet,
|
132 |
-
scheduler=scheduler,
|
133 |
-
image_noising_scheduler=image_noising_scheduler,
|
134 |
-
safety_checker=safety_checker,
|
135 |
-
feature_extractor=feature_extractor,
|
136 |
-
watermarker=watermarker,
|
137 |
-
)
|
138 |
-
self.register_to_config(requires_safety_checker=requires_safety_checker)
|
139 |
-
|
140 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.enable_model_cpu_offload
|
141 |
-
def enable_model_cpu_offload(self, gpu_id=0):
|
142 |
-
r"""
|
143 |
-
Offloads all models to CPU using accelerate, reducing memory usage with a low impact on performance. Compared
|
144 |
-
to `enable_sequential_cpu_offload`, this method moves one whole model at a time to the GPU when its `forward`
|
145 |
-
method is called, and the model remains in GPU until the next model runs. Memory savings are lower than with
|
146 |
-
`enable_sequential_cpu_offload`, but performance is much better due to the iterative execution of the `unet`.
|
147 |
-
"""
|
148 |
-
if is_accelerate_available() and is_accelerate_version(">=", "0.17.0.dev0"):
|
149 |
-
from accelerate import cpu_offload_with_hook
|
150 |
-
else:
|
151 |
-
raise ImportError("`enable_model_cpu_offload` requires `accelerate v0.17.0` or higher.")
|
152 |
-
|
153 |
-
device = torch.device(f"cuda:{gpu_id}")
|
154 |
-
|
155 |
-
if self.device.type != "cpu":
|
156 |
-
self.to("cpu", silence_dtype_warnings=True)
|
157 |
-
torch.cuda.empty_cache() # otherwise we don't see the memory savings (but they probably exist)
|
158 |
-
|
159 |
-
hook = None
|
160 |
-
|
161 |
-
if self.text_encoder is not None:
|
162 |
-
_, hook = cpu_offload_with_hook(self.text_encoder, device, prev_module_hook=hook)
|
163 |
-
|
164 |
-
# Accelerate will move the next model to the device _before_ calling the offload hook of the
|
165 |
-
# previous model. This will cause both models to be present on the device at the same time.
|
166 |
-
# IF uses T5 for its text encoder which is really large. We can manually call the offload
|
167 |
-
# hook for the text encoder to ensure it's moved to the cpu before the unet is moved to
|
168 |
-
# the GPU.
|
169 |
-
self.text_encoder_offload_hook = hook
|
170 |
-
|
171 |
-
_, hook = cpu_offload_with_hook(self.unet, device, prev_module_hook=hook)
|
172 |
-
|
173 |
-
# if the safety checker isn't called, `unet_offload_hook` will have to be called to manually offload the unet
|
174 |
-
self.unet_offload_hook = hook
|
175 |
-
|
176 |
-
if self.safety_checker is not None:
|
177 |
-
_, hook = cpu_offload_with_hook(self.safety_checker, device, prev_module_hook=hook)
|
178 |
-
|
179 |
-
# We'll offload the last model manually.
|
180 |
-
self.final_offload_hook = hook
|
181 |
-
|
182 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.remove_all_hooks
|
183 |
-
def remove_all_hooks(self):
|
184 |
-
if is_accelerate_available():
|
185 |
-
from accelerate.hooks import remove_hook_from_module
|
186 |
-
else:
|
187 |
-
raise ImportError("Please install accelerate via `pip install accelerate`")
|
188 |
-
|
189 |
-
for model in [self.text_encoder, self.unet, self.safety_checker]:
|
190 |
-
if model is not None:
|
191 |
-
remove_hook_from_module(model, recurse=True)
|
192 |
-
|
193 |
-
self.unet_offload_hook = None
|
194 |
-
self.text_encoder_offload_hook = None
|
195 |
-
self.final_offload_hook = None
|
196 |
-
|
197 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._text_preprocessing
|
198 |
-
def _text_preprocessing(self, text, clean_caption=False):
|
199 |
-
if clean_caption and not is_bs4_available():
|
200 |
-
logger.warn(BACKENDS_MAPPING["bs4"][-1].format("Setting `clean_caption=True`"))
|
201 |
-
logger.warn("Setting `clean_caption` to False...")
|
202 |
-
clean_caption = False
|
203 |
-
|
204 |
-
if clean_caption and not is_ftfy_available():
|
205 |
-
logger.warn(BACKENDS_MAPPING["ftfy"][-1].format("Setting `clean_caption=True`"))
|
206 |
-
logger.warn("Setting `clean_caption` to False...")
|
207 |
-
clean_caption = False
|
208 |
-
|
209 |
-
if not isinstance(text, (tuple, list)):
|
210 |
-
text = [text]
|
211 |
-
|
212 |
-
def process(text: str):
|
213 |
-
if clean_caption:
|
214 |
-
text = self._clean_caption(text)
|
215 |
-
text = self._clean_caption(text)
|
216 |
-
else:
|
217 |
-
text = text.lower().strip()
|
218 |
-
return text
|
219 |
-
|
220 |
-
return [process(t) for t in text]
|
221 |
-
|
222 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline._clean_caption
|
223 |
-
def _clean_caption(self, caption):
|
224 |
-
caption = str(caption)
|
225 |
-
caption = ul.unquote_plus(caption)
|
226 |
-
caption = caption.strip().lower()
|
227 |
-
caption = re.sub("<person>", "person", caption)
|
228 |
-
# urls:
|
229 |
-
caption = re.sub(
|
230 |
-
r"\b((?:https?:(?:\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\w/-]*\b\/?(?!@)))", # noqa
|
231 |
-
"",
|
232 |
-
caption,
|
233 |
-
) # regex for urls
|
234 |
-
caption = re.sub(
|
235 |
-
r"\b((?:www:(?:\/{1,3}|[a-zA-Z0-9%])|[a-zA-Z0-9.\-]+[.](?:com|co|ru|net|org|edu|gov|it)[\w/-]*\b\/?(?!@)))", # noqa
|
236 |
-
"",
|
237 |
-
caption,
|
238 |
-
) # regex for urls
|
239 |
-
# html:
|
240 |
-
caption = BeautifulSoup(caption, features="html.parser").text
|
241 |
-
|
242 |
-
# @<nickname>
|
243 |
-
caption = re.sub(r"@[\w\d]+\b", "", caption)
|
244 |
-
|
245 |
-
# 31C0—31EF CJK Strokes
|
246 |
-
# 31F0—31FF Katakana Phonetic Extensions
|
247 |
-
# 3200—32FF Enclosed CJK Letters and Months
|
248 |
-
# 3300—33FF CJK Compatibility
|
249 |
-
# 3400—4DBF CJK Unified Ideographs Extension A
|
250 |
-
# 4DC0—4DFF Yijing Hexagram Symbols
|
251 |
-
# 4E00—9FFF CJK Unified Ideographs
|
252 |
-
caption = re.sub(r"[\u31c0-\u31ef]+", "", caption)
|
253 |
-
caption = re.sub(r"[\u31f0-\u31ff]+", "", caption)
|
254 |
-
caption = re.sub(r"[\u3200-\u32ff]+", "", caption)
|
255 |
-
caption = re.sub(r"[\u3300-\u33ff]+", "", caption)
|
256 |
-
caption = re.sub(r"[\u3400-\u4dbf]+", "", caption)
|
257 |
-
caption = re.sub(r"[\u4dc0-\u4dff]+", "", caption)
|
258 |
-
caption = re.sub(r"[\u4e00-\u9fff]+", "", caption)
|
259 |
-
#######################################################
|
260 |
-
|
261 |
-
# все виды тире / all types of dash --> "-"
|
262 |
-
caption = re.sub(
|
263 |
-
r"[\u002D\u058A\u05BE\u1400\u1806\u2010-\u2015\u2E17\u2E1A\u2E3A\u2E3B\u2E40\u301C\u3030\u30A0\uFE31\uFE32\uFE58\uFE63\uFF0D]+", # noqa
|
264 |
-
"-",
|
265 |
-
caption,
|
266 |
-
)
|
267 |
-
|
268 |
-
# кавычки к одному стандарту
|
269 |
-
caption = re.sub(r"[`´«»“”¨]", '"', caption)
|
270 |
-
caption = re.sub(r"[‘’]", "'", caption)
|
271 |
-
|
272 |
-
# "
|
273 |
-
caption = re.sub(r""?", "", caption)
|
274 |
-
# &
|
275 |
-
caption = re.sub(r"&", "", caption)
|
276 |
-
|
277 |
-
# ip adresses:
|
278 |
-
caption = re.sub(r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}", " ", caption)
|
279 |
-
|
280 |
-
# article ids:
|
281 |
-
caption = re.sub(r"\d:\d\d\s+$", "", caption)
|
282 |
-
|
283 |
-
# \n
|
284 |
-
caption = re.sub(r"\\n", " ", caption)
|
285 |
-
|
286 |
-
# "#123"
|
287 |
-
caption = re.sub(r"#\d{1,3}\b", "", caption)
|
288 |
-
# "#12345.."
|
289 |
-
caption = re.sub(r"#\d{5,}\b", "", caption)
|
290 |
-
# "123456.."
|
291 |
-
caption = re.sub(r"\b\d{6,}\b", "", caption)
|
292 |
-
# filenames:
|
293 |
-
caption = re.sub(r"[\S]+\.(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)", "", caption)
|
294 |
-
|
295 |
-
#
|
296 |
-
caption = re.sub(r"[\"\']{2,}", r'"', caption) # """AUSVERKAUFT"""
|
297 |
-
caption = re.sub(r"[\.]{2,}", r" ", caption) # """AUSVERKAUFT"""
|
298 |
-
|
299 |
-
caption = re.sub(self.bad_punct_regex, r" ", caption) # ***AUSVERKAUFT***, #AUSVERKAUFT
|
300 |
-
caption = re.sub(r"\s+\.\s+", r" ", caption) # " . "
|
301 |
-
|
302 |
-
# this-is-my-cute-cat / this_is_my_cute_cat
|
303 |
-
regex2 = re.compile(r"(?:\-|\_)")
|
304 |
-
if len(re.findall(regex2, caption)) > 3:
|
305 |
-
caption = re.sub(regex2, " ", caption)
|
306 |
-
|
307 |
-
caption = ftfy.fix_text(caption)
|
308 |
-
caption = html.unescape(html.unescape(caption))
|
309 |
-
|
310 |
-
caption = re.sub(r"\b[a-zA-Z]{1,3}\d{3,15}\b", "", caption) # jc6640
|
311 |
-
caption = re.sub(r"\b[a-zA-Z]+\d+[a-zA-Z]+\b", "", caption) # jc6640vc
|
312 |
-
caption = re.sub(r"\b\d+[a-zA-Z]+\d+\b", "", caption) # 6640vc231
|
313 |
-
|
314 |
-
caption = re.sub(r"(worldwide\s+)?(free\s+)?shipping", "", caption)
|
315 |
-
caption = re.sub(r"(free\s)?download(\sfree)?", "", caption)
|
316 |
-
caption = re.sub(r"\bclick\b\s(?:for|on)\s\w+", "", caption)
|
317 |
-
caption = re.sub(r"\b(?:png|jpg|jpeg|bmp|webp|eps|pdf|apk|mp4)(\simage[s]?)?", "", caption)
|
318 |
-
caption = re.sub(r"\bpage\s+\d+\b", "", caption)
|
319 |
-
|
320 |
-
caption = re.sub(r"\b\d*[a-zA-Z]+\d+[a-zA-Z]+\d+[a-zA-Z\d]*\b", r" ", caption) # j2d1a2a...
|
321 |
-
|
322 |
-
caption = re.sub(r"\b\d+\.?\d*[xх×]\d+\.?\d*\b", "", caption)
|
323 |
-
|
324 |
-
caption = re.sub(r"\b\s+\:\s+", r": ", caption)
|
325 |
-
caption = re.sub(r"(\D[,\./])\b", r"\1 ", caption)
|
326 |
-
caption = re.sub(r"\s+", " ", caption)
|
327 |
-
|
328 |
-
caption.strip()
|
329 |
-
|
330 |
-
caption = re.sub(r"^[\"\']([\w\W]+)[\"\']$", r"\1", caption)
|
331 |
-
caption = re.sub(r"^[\'\_,\-\:;]", r"", caption)
|
332 |
-
caption = re.sub(r"[\'\_,\-\:\-\+]$", r"", caption)
|
333 |
-
caption = re.sub(r"^\.\S+$", "", caption)
|
334 |
-
|
335 |
-
return caption.strip()
|
336 |
-
|
337 |
-
@torch.no_grad()
|
338 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.encode_prompt
|
339 |
-
def encode_prompt(
|
340 |
-
self,
|
341 |
-
prompt,
|
342 |
-
do_classifier_free_guidance=True,
|
343 |
-
num_images_per_prompt=1,
|
344 |
-
device=None,
|
345 |
-
negative_prompt=None,
|
346 |
-
prompt_embeds: Optional[torch.FloatTensor] = None,
|
347 |
-
negative_prompt_embeds: Optional[torch.FloatTensor] = None,
|
348 |
-
clean_caption: bool = False,
|
349 |
-
):
|
350 |
-
r"""
|
351 |
-
Encodes the prompt into text encoder hidden states.
|
352 |
-
|
353 |
-
Args:
|
354 |
-
prompt (`str` or `List[str]`, *optional*):
|
355 |
-
prompt to be encoded
|
356 |
-
device: (`torch.device`, *optional*):
|
357 |
-
torch device to place the resulting embeddings on
|
358 |
-
num_images_per_prompt (`int`, *optional*, defaults to 1):
|
359 |
-
number of images that should be generated per prompt
|
360 |
-
do_classifier_free_guidance (`bool`, *optional*, defaults to `True`):
|
361 |
-
whether to use classifier free guidance or not
|
362 |
-
negative_prompt (`str` or `List[str]`, *optional*):
|
363 |
-
The prompt or prompts not to guide the image generation. If not defined, one has to pass
|
364 |
-
`negative_prompt_embeds`. instead. If not defined, one has to pass `negative_prompt_embeds`. instead.
|
365 |
-
Ignored when not using guidance (i.e., ignored if `guidance_scale` is less than `1`).
|
366 |
-
prompt_embeds (`torch.FloatTensor`, *optional*):
|
367 |
-
Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
|
368 |
-
provided, text embeddings will be generated from `prompt` input argument.
|
369 |
-
negative_prompt_embeds (`torch.FloatTensor`, *optional*):
|
370 |
-
Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
|
371 |
-
weighting. If not provided, negative_prompt_embeds will be generated from `negative_prompt` input
|
372 |
-
argument.
|
373 |
-
"""
|
374 |
-
if prompt is not None and negative_prompt is not None:
|
375 |
-
if type(prompt) is not type(negative_prompt):
|
376 |
-
raise TypeError(
|
377 |
-
f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
|
378 |
-
f" {type(prompt)}."
|
379 |
-
)
|
380 |
-
|
381 |
-
if device is None:
|
382 |
-
device = self._execution_device
|
383 |
-
|
384 |
-
if prompt is not None and isinstance(prompt, str):
|
385 |
-
batch_size = 1
|
386 |
-
elif prompt is not None and isinstance(prompt, list):
|
387 |
-
batch_size = len(prompt)
|
388 |
-
else:
|
389 |
-
batch_size = prompt_embeds.shape[0]
|
390 |
-
|
391 |
-
# while T5 can handle much longer input sequences than 77, the text encoder was trained with a max length of 77 for IF
|
392 |
-
max_length = 77
|
393 |
-
|
394 |
-
if prompt_embeds is None:
|
395 |
-
prompt = self._text_preprocessing(prompt, clean_caption=clean_caption)
|
396 |
-
text_inputs = self.tokenizer(
|
397 |
-
prompt,
|
398 |
-
padding="max_length",
|
399 |
-
max_length=max_length,
|
400 |
-
truncation=True,
|
401 |
-
add_special_tokens=True,
|
402 |
-
return_tensors="pt",
|
403 |
-
)
|
404 |
-
text_input_ids = text_inputs.input_ids
|
405 |
-
untruncated_ids = self.tokenizer(prompt, padding="longest", return_tensors="pt").input_ids
|
406 |
-
|
407 |
-
if untruncated_ids.shape[-1] >= text_input_ids.shape[-1] and not torch.equal(
|
408 |
-
text_input_ids, untruncated_ids
|
409 |
-
):
|
410 |
-
removed_text = self.tokenizer.batch_decode(untruncated_ids[:, max_length - 1 : -1])
|
411 |
-
logger.warning(
|
412 |
-
"The following part of your input was truncated because CLIP can only handle sequences up to"
|
413 |
-
f" {max_length} tokens: {removed_text}"
|
414 |
-
)
|
415 |
-
|
416 |
-
attention_mask = text_inputs.attention_mask.to(device)
|
417 |
-
|
418 |
-
prompt_embeds = self.text_encoder(
|
419 |
-
text_input_ids.to(device),
|
420 |
-
attention_mask=attention_mask,
|
421 |
-
)
|
422 |
-
prompt_embeds = prompt_embeds[0]
|
423 |
-
|
424 |
-
if self.text_encoder is not None:
|
425 |
-
dtype = self.text_encoder.dtype
|
426 |
-
elif self.unet is not None:
|
427 |
-
dtype = self.unet.dtype
|
428 |
-
else:
|
429 |
-
dtype = None
|
430 |
-
|
431 |
-
prompt_embeds = prompt_embeds.to(dtype=dtype, device=device)
|
432 |
-
|
433 |
-
bs_embed, seq_len, _ = prompt_embeds.shape
|
434 |
-
# duplicate text embeddings for each generation per prompt, using mps friendly method
|
435 |
-
prompt_embeds = prompt_embeds.repeat(1, num_images_per_prompt, 1)
|
436 |
-
prompt_embeds = prompt_embeds.view(bs_embed * num_images_per_prompt, seq_len, -1)
|
437 |
-
|
438 |
-
# get unconditional embeddings for classifier free guidance
|
439 |
-
if do_classifier_free_guidance and negative_prompt_embeds is None:
|
440 |
-
uncond_tokens: List[str]
|
441 |
-
if negative_prompt is None:
|
442 |
-
uncond_tokens = [""] * batch_size
|
443 |
-
elif isinstance(negative_prompt, str):
|
444 |
-
uncond_tokens = [negative_prompt]
|
445 |
-
elif batch_size != len(negative_prompt):
|
446 |
-
raise ValueError(
|
447 |
-
f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
|
448 |
-
f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
|
449 |
-
" the batch size of `prompt`."
|
450 |
-
)
|
451 |
-
else:
|
452 |
-
uncond_tokens = negative_prompt
|
453 |
-
|
454 |
-
uncond_tokens = self._text_preprocessing(uncond_tokens, clean_caption=clean_caption)
|
455 |
-
max_length = prompt_embeds.shape[1]
|
456 |
-
uncond_input = self.tokenizer(
|
457 |
-
uncond_tokens,
|
458 |
-
padding="max_length",
|
459 |
-
max_length=max_length,
|
460 |
-
truncation=True,
|
461 |
-
return_attention_mask=True,
|
462 |
-
add_special_tokens=True,
|
463 |
-
return_tensors="pt",
|
464 |
-
)
|
465 |
-
attention_mask = uncond_input.attention_mask.to(device)
|
466 |
-
|
467 |
-
negative_prompt_embeds = self.text_encoder(
|
468 |
-
uncond_input.input_ids.to(device),
|
469 |
-
attention_mask=attention_mask,
|
470 |
-
)
|
471 |
-
negative_prompt_embeds = negative_prompt_embeds[0]
|
472 |
-
|
473 |
-
if do_classifier_free_guidance:
|
474 |
-
# duplicate unconditional embeddings for each generation per prompt, using mps friendly method
|
475 |
-
seq_len = negative_prompt_embeds.shape[1]
|
476 |
-
|
477 |
-
negative_prompt_embeds = negative_prompt_embeds.to(dtype=dtype, device=device)
|
478 |
-
|
479 |
-
negative_prompt_embeds = negative_prompt_embeds.repeat(1, num_images_per_prompt, 1)
|
480 |
-
negative_prompt_embeds = negative_prompt_embeds.view(batch_size * num_images_per_prompt, seq_len, -1)
|
481 |
-
|
482 |
-
# For classifier free guidance, we need to do two forward passes.
|
483 |
-
# Here we concatenate the unconditional and text embeddings into a single batch
|
484 |
-
# to avoid doing two forward passes
|
485 |
-
else:
|
486 |
-
negative_prompt_embeds = None
|
487 |
-
|
488 |
-
return prompt_embeds, negative_prompt_embeds
|
489 |
-
|
490 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.run_safety_checker
|
491 |
-
def run_safety_checker(self, image, device, dtype):
|
492 |
-
if self.safety_checker is not None:
|
493 |
-
safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors="pt").to(device)
|
494 |
-
image, nsfw_detected, watermark_detected = self.safety_checker(
|
495 |
-
images=image,
|
496 |
-
clip_input=safety_checker_input.pixel_values.to(dtype=dtype),
|
497 |
-
)
|
498 |
-
else:
|
499 |
-
nsfw_detected = None
|
500 |
-
watermark_detected = None
|
501 |
-
|
502 |
-
if hasattr(self, "unet_offload_hook") and self.unet_offload_hook is not None:
|
503 |
-
self.unet_offload_hook.offload()
|
504 |
-
|
505 |
-
return image, nsfw_detected, watermark_detected
|
506 |
-
|
507 |
-
# Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_extra_step_kwargs
|
508 |
-
def prepare_extra_step_kwargs(self, generator, eta):
|
509 |
-
# prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
|
510 |
-
# eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
|
511 |
-
# eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
|
512 |
-
# and should be between [0, 1]
|
513 |
-
|
514 |
-
accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
|
515 |
-
extra_step_kwargs = {}
|
516 |
-
if accepts_eta:
|
517 |
-
extra_step_kwargs["eta"] = eta
|
518 |
-
|
519 |
-
# check if the scheduler accepts generator
|
520 |
-
accepts_generator = "generator" in set(inspect.signature(self.scheduler.step).parameters.keys())
|
521 |
-
if accepts_generator:
|
522 |
-
extra_step_kwargs["generator"] = generator
|
523 |
-
return extra_step_kwargs
|
524 |
-
|
525 |
-
def check_inputs(
|
526 |
-
self,
|
527 |
-
prompt,
|
528 |
-
image,
|
529 |
-
batch_size,
|
530 |
-
noise_level,
|
531 |
-
callback_steps,
|
532 |
-
negative_prompt=None,
|
533 |
-
prompt_embeds=None,
|
534 |
-
negative_prompt_embeds=None,
|
535 |
-
):
|
536 |
-
if (callback_steps is None) or (
|
537 |
-
callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)
|
538 |
-
):
|
539 |
-
raise ValueError(
|
540 |
-
f"`callback_steps` has to be a positive integer but is {callback_steps} of type"
|
541 |
-
f" {type(callback_steps)}."
|
542 |
-
)
|
543 |
-
|
544 |
-
if prompt is not None and prompt_embeds is not None:
|
545 |
-
raise ValueError(
|
546 |
-
f"Cannot forward both `prompt`: {prompt} and `prompt_embeds`: {prompt_embeds}. Please make sure to"
|
547 |
-
" only forward one of the two."
|
548 |
-
)
|
549 |
-
elif prompt is None and prompt_embeds is None:
|
550 |
-
raise ValueError(
|
551 |
-
"Provide either `prompt` or `prompt_embeds`. Cannot leave both `prompt` and `prompt_embeds` undefined."
|
552 |
-
)
|
553 |
-
elif prompt is not None and (not isinstance(prompt, str) and not isinstance(prompt, list)):
|
554 |
-
raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}")
|
555 |
-
|
556 |
-
if negative_prompt is not None and negative_prompt_embeds is not None:
|
557 |
-
raise ValueError(
|
558 |
-
f"Cannot forward both `negative_prompt`: {negative_prompt} and `negative_prompt_embeds`:"
|
559 |
-
f" {negative_prompt_embeds}. Please make sure to only forward one of the two."
|
560 |
-
)
|
561 |
-
|
562 |
-
if prompt_embeds is not None and negative_prompt_embeds is not None:
|
563 |
-
if prompt_embeds.shape != negative_prompt_embeds.shape:
|
564 |
-
raise ValueError(
|
565 |
-
"`prompt_embeds` and `negative_prompt_embeds` must have the same shape when passed directly, but"
|
566 |
-
f" got: `prompt_embeds` {prompt_embeds.shape} != `negative_prompt_embeds`"
|
                    f" {negative_prompt_embeds.shape}."
                )

        if noise_level < 0 or noise_level >= self.image_noising_scheduler.config.num_train_timesteps:
            raise ValueError(
                f"`noise_level`: {noise_level} must be a valid timestep in `self.noising_scheduler`, [0, {self.image_noising_scheduler.config.num_train_timesteps})"
            )

        if isinstance(image, list):
            check_image_type = image[0]
        else:
            check_image_type = image

        if (
            not isinstance(check_image_type, torch.Tensor)
            and not isinstance(check_image_type, PIL.Image.Image)
            and not isinstance(check_image_type, np.ndarray)
        ):
            raise ValueError(
                "`image` has to be of type `torch.FloatTensor`, `PIL.Image.Image`, `np.ndarray`, or List[...] but is"
                f" {type(check_image_type)}"
            )

        if isinstance(image, list):
            image_batch_size = len(image)
        elif isinstance(image, torch.Tensor):
            image_batch_size = image.shape[0]
        elif isinstance(image, PIL.Image.Image):
            image_batch_size = 1
        elif isinstance(image, np.ndarray):
            image_batch_size = image.shape[0]
        else:
            assert False

        if batch_size != image_batch_size:
            raise ValueError(f"image batch size: {image_batch_size} must be same as prompt batch size {batch_size}")

    # Copied from diffusers.pipelines.deepfloyd_if.pipeline_if.IFPipeline.prepare_intermediate_images
    def prepare_intermediate_images(self, batch_size, num_channels, height, width, dtype, device, generator):
        shape = (batch_size, num_channels, height, width)
        if isinstance(generator, list) and len(generator) != batch_size:
            raise ValueError(
                f"You have passed a list of generators of length {len(generator)}, but requested an effective batch"
                f" size of {batch_size}. Make sure the batch size matches the length of the generators."
            )

        intermediate_images = randn_tensor(shape, generator=generator, device=device, dtype=dtype)

        # scale the initial noise by the standard deviation required by the scheduler
        intermediate_images = intermediate_images * self.scheduler.init_noise_sigma
        return intermediate_images

    def preprocess_image(self, image, num_images_per_prompt, device):
        if not isinstance(image, torch.Tensor) and not isinstance(image, list):
            image = [image]

        if isinstance(image[0], PIL.Image.Image):
            image = [np.array(i).astype(np.float32) / 127.5 - 1.0 for i in image]

            image = np.stack(image, axis=0)  # to np
            image = torch.from_numpy(image.transpose(0, 3, 1, 2))
        elif isinstance(image[0], np.ndarray):
            image = np.stack(image, axis=0)  # to np
            if image.ndim == 5:
                image = image[0]

            image = torch.from_numpy(image.transpose(0, 3, 1, 2))
        elif isinstance(image, list) and isinstance(image[0], torch.Tensor):
            dims = image[0].ndim

            if dims == 3:
                image = torch.stack(image, dim=0)
            elif dims == 4:
                image = torch.concat(image, dim=0)
            else:
                raise ValueError(f"Image must have 3 or 4 dimensions, instead got {dims}")

        image = image.to(device=device, dtype=self.unet.dtype)

        image = image.repeat_interleave(num_images_per_prompt, dim=0)

        return image

    @torch.no_grad()
    @replace_example_docstring(EXAMPLE_DOC_STRING)
    def __call__(
        self,
        prompt: Union[str, List[str]] = None,
        height: int = None,
        width: int = None,
        image: Union[PIL.Image.Image, np.ndarray, torch.FloatTensor] = None,
        num_inference_steps: int = 50,
        timesteps: List[int] = None,
        guidance_scale: float = 4.0,
        negative_prompt: Optional[Union[str, List[str]]] = None,
        num_images_per_prompt: Optional[int] = 1,
        eta: float = 0.0,
        generator: Optional[Union[torch.Generator, List[torch.Generator]]] = None,
        prompt_embeds: Optional[torch.FloatTensor] = None,
        negative_prompt_embeds: Optional[torch.FloatTensor] = None,
        output_type: Optional[str] = "pil",
        return_dict: bool = True,
        callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
        callback_steps: int = 1,
        cross_attention_kwargs: Optional[Dict[str, Any]] = None,
        noise_level: int = 250,
        clean_caption: bool = True,
    ):
        """
        Function invoked when calling the pipeline for generation.

        Args:
            prompt (`str` or `List[str]`, *optional*):
                The prompt or prompts to guide the image generation. If not defined, one has to pass `prompt_embeds`
                instead.
            height (`int`, *optional*, defaults to self.unet.config.sample_size):
                The height in pixels of the generated image.
            width (`int`, *optional*, defaults to self.unet.config.sample_size):
                The width in pixels of the generated image.
            image (`PIL.Image.Image`, `np.ndarray`, `torch.FloatTensor`):
                The image to be upscaled.
            num_inference_steps (`int`, *optional*, defaults to 50):
                The number of denoising steps. More denoising steps usually lead to a higher quality image at the
                expense of slower inference.
            timesteps (`List[int]`, *optional*):
                Custom timesteps to use for the denoising process. If not defined, equally spaced `num_inference_steps`
                timesteps are used. Must be in descending order.
            guidance_scale (`float`, *optional*, defaults to 4.0):
                Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
                `guidance_scale` is defined as `w` of equation 2. of [Imagen
                Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
                1`. A higher guidance scale encourages generating images that are closely linked to the text `prompt`,
                usually at the expense of lower image quality.
            negative_prompt (`str` or `List[str]`, *optional*):
                The prompt or prompts not to guide the image generation. If not defined, one has to pass
                `negative_prompt_embeds` instead. Ignored when not using guidance (i.e., ignored if `guidance_scale` is
                less than `1`).
            num_images_per_prompt (`int`, *optional*, defaults to 1):
                The number of images to generate per prompt.
            eta (`float`, *optional*, defaults to 0.0):
                Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
                [`schedulers.DDIMScheduler`], will be ignored for others.
            generator (`torch.Generator` or `List[torch.Generator]`, *optional*):
                One or a list of [torch generator(s)](https://pytorch.org/docs/stable/generated/torch.Generator.html)
                to make generation deterministic.
            prompt_embeds (`torch.FloatTensor`, *optional*):
                Pre-generated text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt weighting. If not
                provided, text embeddings will be generated from the `prompt` input argument.
            negative_prompt_embeds (`torch.FloatTensor`, *optional*):
                Pre-generated negative text embeddings. Can be used to easily tweak text inputs, *e.g.* prompt
                weighting. If not provided, negative_prompt_embeds will be generated from the `negative_prompt` input
                argument.
            output_type (`str`, *optional*, defaults to `"pil"`):
                The output format of the generated image. Choose between
                [PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
            return_dict (`bool`, *optional*, defaults to `True`):
                Whether or not to return a [`~pipelines.stable_diffusion.IFPipelineOutput`] instead of a plain tuple.
            callback (`Callable`, *optional*):
                A function that will be called every `callback_steps` steps during inference. The function will be
                called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
            callback_steps (`int`, *optional*, defaults to 1):
                The frequency at which the `callback` function will be called. If not specified, the callback will be
                called at every step.
            cross_attention_kwargs (`dict`, *optional*):
                A kwargs dictionary that if specified is passed along to the `AttentionProcessor` as defined under
                `self.processor` in
                [diffusers.cross_attention](https://github.com/huggingface/diffusers/blob/main/src/diffusers/models/cross_attention.py).
            noise_level (`int`, *optional*, defaults to 250):
                The amount of noise to add to the upscaled image. Must be in the range `[0, 1000)`.
            clean_caption (`bool`, *optional*, defaults to `True`):
                Whether or not to clean the caption before creating embeddings. Requires `beautifulsoup4` and `ftfy` to
                be installed. If the dependencies are not installed, the embeddings will be created from the raw
                prompt.

        Examples:

        Returns:
            [`~pipelines.stable_diffusion.IFPipelineOutput`] or `tuple`:
            [`~pipelines.stable_diffusion.IFPipelineOutput`] if `return_dict` is True, otherwise a `tuple`. When
            returning a tuple, the first element is a list with the generated images, and the second element is a list
            of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work" (nsfw)
            or watermarked content, according to the `safety_checker`.
        """
        # 1. Check inputs. Raise error if not correct

        if prompt is not None and isinstance(prompt, str):
            batch_size = 1
        elif prompt is not None and isinstance(prompt, list):
            batch_size = len(prompt)
        else:
            batch_size = prompt_embeds.shape[0]

        self.check_inputs(
            prompt,
            image,
            batch_size,
            noise_level,
            callback_steps,
            negative_prompt,
            prompt_embeds,
            negative_prompt_embeds,
        )

        # 2. Define call parameters

        height = height or self.unet.config.sample_size
        width = width or self.unet.config.sample_size

        device = self._execution_device

        # here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)
        # of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
        # corresponds to doing no classifier free guidance.
        do_classifier_free_guidance = guidance_scale > 1.0

        # 3. Encode input prompt
        prompt_embeds, negative_prompt_embeds = self.encode_prompt(
            prompt,
            do_classifier_free_guidance,
            num_images_per_prompt=num_images_per_prompt,
            device=device,
            negative_prompt=negative_prompt,
            prompt_embeds=prompt_embeds,
            negative_prompt_embeds=negative_prompt_embeds,
            clean_caption=clean_caption,
        )

        if do_classifier_free_guidance:
            prompt_embeds = torch.cat([negative_prompt_embeds, prompt_embeds])

        # 4. Prepare timesteps
        if timesteps is not None:
            self.scheduler.set_timesteps(timesteps=timesteps, device=device)
            timesteps = self.scheduler.timesteps
            num_inference_steps = len(timesteps)
        else:
            self.scheduler.set_timesteps(num_inference_steps, device=device)
            timesteps = self.scheduler.timesteps

        # 5. Prepare intermediate images
        num_channels = self.unet.config.in_channels // 2
        intermediate_images = self.prepare_intermediate_images(
            batch_size * num_images_per_prompt,
            num_channels,
            height,
            width,
            prompt_embeds.dtype,
            device,
            generator,
        )

        # 6. Prepare extra step kwargs. TODO: Logic should ideally just be moved out of the pipeline
        extra_step_kwargs = self.prepare_extra_step_kwargs(generator, eta)

        # 7. Prepare upscaled image and noise level
        image = self.preprocess_image(image, num_images_per_prompt, device)
        upscaled = F.interpolate(image, (height, width), mode="bilinear", align_corners=True)

        noise_level = torch.tensor([noise_level] * upscaled.shape[0], device=upscaled.device)
        noise = randn_tensor(upscaled.shape, generator=generator, device=upscaled.device, dtype=upscaled.dtype)
        upscaled = self.image_noising_scheduler.add_noise(upscaled, noise, timesteps=noise_level)

        if do_classifier_free_guidance:
            noise_level = torch.cat([noise_level] * 2)

        # HACK: see comment in `enable_model_cpu_offload`
        if hasattr(self, "text_encoder_offload_hook") and self.text_encoder_offload_hook is not None:
            self.text_encoder_offload_hook.offload()

        # 8. Denoising loop
        num_warmup_steps = len(timesteps) - num_inference_steps * self.scheduler.order
        with self.progress_bar(total=num_inference_steps) as progress_bar:
            for i, t in enumerate(timesteps):
                model_input = torch.cat([intermediate_images, upscaled], dim=1)

                model_input = torch.cat([model_input] * 2) if do_classifier_free_guidance else model_input
                model_input = self.scheduler.scale_model_input(model_input, t)

                # predict the noise residual
                noise_pred = self.unet(
                    model_input,
                    t,
                    encoder_hidden_states=prompt_embeds,
                    class_labels=noise_level,
                    cross_attention_kwargs=cross_attention_kwargs,
                    return_dict=False,
                )[0]

                # perform guidance
                if do_classifier_free_guidance:
                    noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
                    noise_pred_uncond, _ = noise_pred_uncond.split(model_input.shape[1] // 2, dim=1)
                    noise_pred_text, predicted_variance = noise_pred_text.split(model_input.shape[1] // 2, dim=1)
                    noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
                    noise_pred = torch.cat([noise_pred, predicted_variance], dim=1)

                if self.scheduler.config.variance_type not in ["learned", "learned_range"]:
                    noise_pred, _ = noise_pred.split(intermediate_images.shape[1], dim=1)

                # compute the previous noisy sample x_t -> x_t-1
                intermediate_images = self.scheduler.step(
                    noise_pred, t, intermediate_images, **extra_step_kwargs, return_dict=False
                )[0]

                # call the callback, if provided
                if i == len(timesteps) - 1 or ((i + 1) > num_warmup_steps and (i + 1) % self.scheduler.order == 0):
                    progress_bar.update()
                    if callback is not None and i % callback_steps == 0:
                        callback(i, t, intermediate_images)

        image = intermediate_images

        if output_type == "pil":
            # 9. Post-processing
            image = (image / 2 + 0.5).clamp(0, 1)
            image = image.cpu().permute(0, 2, 3, 1).float().numpy()

            # 10. Run safety checker
            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)

            # 11. Convert to PIL
            image = self.numpy_to_pil(image)

            # 12. Apply watermark
            if self.watermarker is not None:
                self.watermarker.apply_watermark(image, self.unet.config.sample_size)
        elif output_type == "pt":
            nsfw_detected = None
            watermark_detected = None

            if hasattr(self, "unet_offload_hook") and self.unet_offload_hook is not None:
                self.unet_offload_hook.offload()
        else:
            # 9. Post-processing
            image = (image / 2 + 0.5).clamp(0, 1)
            image = image.cpu().permute(0, 2, 3, 1).float().numpy()

            # 10. Run safety checker
            image, nsfw_detected, watermark_detected = self.run_safety_checker(image, device, prompt_embeds.dtype)

        # Offload last model to CPU
        if hasattr(self, "final_offload_hook") and self.final_offload_hook is not None:
            self.final_offload_hook.offload()

        if not return_dict:
            return (image, nsfw_detected, watermark_detected)

        return IFPipelineOutput(images=image, nsfw_detected=nsfw_detected, watermark_detected=watermark_detected)
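For context, a minimal sketch of how this super-resolution pipeline is meant to be driven from a first-stage IF pipeline. The checkpoint names and the prompt are illustrative (the public DeepFloyd weights are gated behind a license acceptance), not taken from this diff:

```python
import torch
from diffusers import IFPipeline, IFSuperResolutionPipeline

# Stage I produces a 64x64 image; the pipeline in this file (stage II) upscales it to 256x256.
stage_1 = IFPipeline.from_pretrained("DeepFloyd/IF-I-XL-v1.0", variant="fp16", torch_dtype=torch.float16)
stage_2 = IFSuperResolutionPipeline.from_pretrained(
    "DeepFloyd/IF-II-L-v1.0", text_encoder=None, variant="fp16", torch_dtype=torch.float16
)
stage_1.enable_model_cpu_offload()
stage_2.enable_model_cpu_offload()

prompt = "a photo of a red panda wearing a top hat"  # illustrative prompt
prompt_embeds, negative_embeds = stage_1.encode_prompt(prompt)

image = stage_1(prompt_embeds=prompt_embeds, negative_prompt_embeds=negative_embeds, output_type="pt").images
image = stage_2(
    image=image,
    prompt_embeds=prompt_embeds,
    negative_prompt_embeds=negative_embeds,
    noise_level=250,  # must lie in [0, 1000), as enforced in check_inputs above
).images[0]
image.save("if_stage_2.png")
```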
spaces/Andy1621/uniformer_image_detection/configs/sabl/README.md
DELETED
@@ -1,37 +0,0 @@
|
|
1 |
-
# Side-Aware Boundary Localization for More Precise Object Detection
|
2 |
-
|
3 |
-
## Introduction
|
4 |
-
|
5 |
-
[ALGORITHM]
|
6 |
-
|
7 |
-
We provide config files to reproduce the object detection results in the ECCV 2020 Spotlight paper for [Side-Aware Boundary Localization for More Precise Object Detection](https://arxiv.org/abs/1912.04260).
|
8 |
-
|
9 |
-
```latex
|
10 |
-
@inproceedings{Wang_2020_ECCV,
|
11 |
-
title = {Side-Aware Boundary Localization for More Precise Object Detection},
|
12 |
-
author = {Jiaqi Wang and Wenwei Zhang and Yuhang Cao and Kai Chen and Jiangmiao Pang and Tao Gong and Jianping Shi and Chen Change Loy and Dahua Lin},
|
13 |
-
booktitle = {ECCV},
|
14 |
-
year = {2020}
|
15 |
-
}
|
16 |
-
```
|
17 |
-
|
18 |
-
## Results and Models
|
19 |
-
|
20 |
-
The results on COCO 2017 val is shown in the below table. (results on test-dev are usually slightly higher than val).
|
21 |
-
Single-scale testing (1333x800) is adopted in all results.
|
22 |
-
|
23 |
-
| Method | Backbone | Lr schd | ms-train | box AP | Config | Download |
|
24 |
-
| :----------------: | :-------: | :-----: | :------: | :----: | :----------------------------------------------------------------------------------------------------------------: | :---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
|
25 |
-
| SABL Faster R-CNN | R-50-FPN | 1x | N | 39.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/sabl_faster_rcnn_r50_fpn_1x_coco-e867595b.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r50_fpn_1x_coco/20200830_130324.log.json) |
|
26 |
-
| SABL Faster R-CNN | R-101-FPN | 1x | N | 41.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_faster_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/sabl_faster_rcnn_r101_fpn_1x_coco-f804c6c1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_faster_rcnn_r101_fpn_1x_coco/20200830_183949.log.json) |
|
27 |
-
| SABL Cascade R-CNN | R-50-FPN | 1x | N | 41.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/sabl_cascade_rcnn_r50_fpn_1x_coco-e1748e5e.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r50_fpn_1x_coco/20200831_033726.log.json) |
|
28 |
-
| SABL Cascade R-CNN | R-101-FPN | 1x | N | 43.0 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/sabl_cascade_rcnn_r101_fpn_1x_coco-2b83e87c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_cascade_rcnn_r101_fpn_1x_coco/20200831_141745.log.json) |
|
29 |
-
|
30 |
-
| Method | Backbone | GN | Lr schd | ms-train | box AP | Config | Download |
|
31 |
-
| :------------: | :-------: | :---: | :-----: | :---------: | :----: | :---------------------------------------------------------------------------------------------------------------------------: | :------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------: |
|
32 |
-
| SABL RetinaNet | R-50-FPN | N | 1x | N | 37.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/sabl_retinanet_r50_fpn_1x_coco-6c54fd4f.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_1x_coco/20200830_053451.log.json) |
|
33 |
-
| SABL RetinaNet | R-50-FPN | Y | 1x | N | 38.8 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r50_fpn_gn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/sabl_retinanet_r50_fpn_gn_1x_coco-e16dfcf1.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r50_fpn_gn_1x_coco/20200831_141955.log.json) |
|
34 |
-
| SABL RetinaNet | R-101-FPN | N | 1x | N | 39.7 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/sabl_retinanet_r101_fpn_1x_coco-42026904.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_1x_coco/20200831_034256.log.json) |
|
35 |
-
| SABL RetinaNet | R-101-FPN | Y | 1x | N | 40.5 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_1x_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/sabl_retinanet_r101_fpn_gn_1x_coco-40a893e8.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_1x_coco/20200830_201422.log.json) |
|
36 |
-
| SABL RetinaNet | R-101-FPN | Y | 2x | Y (640~800) | 42.9 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco-1e63382c.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_640_800_coco/20200830_144807.log.json) |
|
37 |
-
| SABL RetinaNet | R-101-FPN | Y | 2x | Y (480~960) | 43.6 | [config](https://github.com/open-mmlab/mmdetection/blob/master/configs/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco.py) | [model](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco-5342f857.pth) | [log](http://download.openmmlab.com/mmdetection/v2.0/sabl/sabl_retinanet_r101_fpn_gn_2x_ms_480_960_coco/20200830_164537.log.json) |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Andy1621/uniformer_image_segmentation/configs/_base_/schedules/schedule_20k.py
DELETED
@@ -1,9 +0,0 @@
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0005)
optimizer_config = dict()
# learning policy
lr_config = dict(policy='poly', power=0.9, min_lr=1e-4, by_epoch=False)
# runtime settings
runner = dict(type='IterBasedRunner', max_iters=20000)
checkpoint_config = dict(by_epoch=False, interval=2000)
evaluation = dict(interval=2000, metric='mIoU')
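As a quick illustration, such a base schedule can be inspected programmatically. This sketch assumes an MMSegmentation-style checkout where the file sits under `configs/_base_/schedules/` and the pre-2.0 `mmcv` that this generation of configs targets:

```python
from mmcv import Config  # mmcv < 2.0 API

cfg = Config.fromfile("configs/_base_/schedules/schedule_20k.py")
print(cfg.runner.max_iters)      # 20000 iterations
print(cfg.lr_config.policy)      # 'poly' learning-rate decay
print(cfg.evaluation.interval)   # evaluate mIoU every 2000 iterations
```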
spaces/Anonymous-sub/Rerender/ControlNet/annotator/uniformer/mmseg/utils/collect_env.py
DELETED
@@ -1,17 +0,0 @@
from annotator.uniformer.mmcv.utils import collect_env as collect_base_env
from annotator.uniformer.mmcv.utils import get_git_hash

import annotator.uniformer.mmseg as mmseg


def collect_env():
    """Collect the information of the running environments."""
    env_info = collect_base_env()
    env_info['MMSegmentation'] = f'{mmseg.__version__}+{get_git_hash()[:7]}'

    return env_info


if __name__ == '__main__':
    for name, val in collect_env().items():
        print('{}: {}'.format(name, val))
spaces/Anonymous-sub/Rerender/ControlNet/ldm/modules/midas/midas/transforms.py
DELETED
@@ -1,234 +0,0 @@
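Before the listing, a minimal sketch of how the MiDaS-style transforms defined in this module are typically composed. The input size, interpolation choice, normalization statistics, and image path are assumptions for illustration, not taken from this file:

```python
import cv2
import numpy as np
from torchvision.transforms import Compose
# Resize, NormalizeImage and PrepareForNet are defined in the module reproduced below.

transform = Compose([
    Resize(384, 384, resize_target=False, keep_aspect_ratio=True,
           ensure_multiple_of=32, resize_method="upper_bound",
           image_interpolation_method=cv2.INTER_CUBIC),
    NormalizeImage(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225]),
    PrepareForNet(),
])

img = cv2.cvtColor(cv2.imread("example.jpg"), cv2.COLOR_BGR2RGB) / 255.0
sample = transform({"image": img})  # dict in, dict out; "image" is now CHW float32, ready for the network
```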
|
|
1 |
-
import numpy as np
|
2 |
-
import cv2
|
3 |
-
import math
|
4 |
-
|
5 |
-
|
6 |
-
def apply_min_size(sample, size, image_interpolation_method=cv2.INTER_AREA):
|
7 |
-
"""Rezise the sample to ensure the given size. Keeps aspect ratio.
|
8 |
-
|
9 |
-
Args:
|
10 |
-
sample (dict): sample
|
11 |
-
size (tuple): image size
|
12 |
-
|
13 |
-
Returns:
|
14 |
-
tuple: new size
|
15 |
-
"""
|
16 |
-
shape = list(sample["disparity"].shape)
|
17 |
-
|
18 |
-
if shape[0] >= size[0] and shape[1] >= size[1]:
|
19 |
-
return sample
|
20 |
-
|
21 |
-
scale = [0, 0]
|
22 |
-
scale[0] = size[0] / shape[0]
|
23 |
-
scale[1] = size[1] / shape[1]
|
24 |
-
|
25 |
-
scale = max(scale)
|
26 |
-
|
27 |
-
shape[0] = math.ceil(scale * shape[0])
|
28 |
-
shape[1] = math.ceil(scale * shape[1])
|
29 |
-
|
30 |
-
# resize
|
31 |
-
sample["image"] = cv2.resize(
|
32 |
-
sample["image"], tuple(shape[::-1]), interpolation=image_interpolation_method
|
33 |
-
)
|
34 |
-
|
35 |
-
sample["disparity"] = cv2.resize(
|
36 |
-
sample["disparity"], tuple(shape[::-1]), interpolation=cv2.INTER_NEAREST
|
37 |
-
)
|
38 |
-
sample["mask"] = cv2.resize(
|
39 |
-
sample["mask"].astype(np.float32),
|
40 |
-
tuple(shape[::-1]),
|
41 |
-
interpolation=cv2.INTER_NEAREST,
|
42 |
-
)
|
43 |
-
sample["mask"] = sample["mask"].astype(bool)
|
44 |
-
|
45 |
-
return tuple(shape)
|
46 |
-
|
47 |
-
|
48 |
-
class Resize(object):
|
49 |
-
"""Resize sample to given size (width, height).
|
50 |
-
"""
|
51 |
-
|
52 |
-
def __init__(
|
53 |
-
self,
|
54 |
-
width,
|
55 |
-
height,
|
56 |
-
resize_target=True,
|
57 |
-
keep_aspect_ratio=False,
|
58 |
-
ensure_multiple_of=1,
|
59 |
-
resize_method="lower_bound",
|
60 |
-
image_interpolation_method=cv2.INTER_AREA,
|
61 |
-
):
|
62 |
-
"""Init.
|
63 |
-
|
64 |
-
Args:
|
65 |
-
width (int): desired output width
|
66 |
-
height (int): desired output height
|
67 |
-
resize_target (bool, optional):
|
68 |
-
True: Resize the full sample (image, mask, target).
|
69 |
-
False: Resize image only.
|
70 |
-
Defaults to True.
|
71 |
-
keep_aspect_ratio (bool, optional):
|
72 |
-
True: Keep the aspect ratio of the input sample.
|
73 |
-
Output sample might not have the given width and height, and
|
74 |
-
resize behaviour depends on the parameter 'resize_method'.
|
75 |
-
Defaults to False.
|
76 |
-
ensure_multiple_of (int, optional):
|
77 |
-
Output width and height is constrained to be multiple of this parameter.
|
78 |
-
Defaults to 1.
|
79 |
-
resize_method (str, optional):
|
80 |
-
"lower_bound": Output will be at least as large as the given size.
|
81 |
-
"upper_bound": Output will be at max as large as the given size. (Output size might be smaller than given size.)
|
82 |
-
"minimal": Scale as least as possible. (Output size might be smaller than given size.)
|
83 |
-
Defaults to "lower_bound".
|
84 |
-
"""
|
85 |
-
self.__width = width
|
86 |
-
self.__height = height
|
87 |
-
|
88 |
-
self.__resize_target = resize_target
|
89 |
-
self.__keep_aspect_ratio = keep_aspect_ratio
|
90 |
-
self.__multiple_of = ensure_multiple_of
|
91 |
-
self.__resize_method = resize_method
|
92 |
-
self.__image_interpolation_method = image_interpolation_method
|
93 |
-
|
94 |
-
def constrain_to_multiple_of(self, x, min_val=0, max_val=None):
|
95 |
-
y = (np.round(x / self.__multiple_of) * self.__multiple_of).astype(int)
|
96 |
-
|
97 |
-
if max_val is not None and y > max_val:
|
98 |
-
y = (np.floor(x / self.__multiple_of) * self.__multiple_of).astype(int)
|
99 |
-
|
100 |
-
if y < min_val:
|
101 |
-
y = (np.ceil(x / self.__multiple_of) * self.__multiple_of).astype(int)
|
102 |
-
|
103 |
-
return y
|
104 |
-
|
105 |
-
def get_size(self, width, height):
|
106 |
-
# determine new height and width
|
107 |
-
scale_height = self.__height / height
|
108 |
-
scale_width = self.__width / width
|
109 |
-
|
110 |
-
if self.__keep_aspect_ratio:
|
111 |
-
if self.__resize_method == "lower_bound":
|
112 |
-
# scale such that output size is lower bound
|
113 |
-
if scale_width > scale_height:
|
114 |
-
# fit width
|
115 |
-
scale_height = scale_width
|
116 |
-
else:
|
117 |
-
# fit height
|
118 |
-
scale_width = scale_height
|
119 |
-
elif self.__resize_method == "upper_bound":
|
120 |
-
# scale such that output size is upper bound
|
121 |
-
if scale_width < scale_height:
|
122 |
-
# fit width
|
123 |
-
scale_height = scale_width
|
124 |
-
else:
|
125 |
-
# fit height
|
126 |
-
scale_width = scale_height
|
127 |
-
elif self.__resize_method == "minimal":
|
128 |
-
# scale as least as possbile
|
129 |
-
if abs(1 - scale_width) < abs(1 - scale_height):
|
130 |
-
# fit width
|
131 |
-
scale_height = scale_width
|
132 |
-
else:
|
133 |
-
# fit height
|
134 |
-
scale_width = scale_height
|
135 |
-
else:
|
136 |
-
raise ValueError(
|
137 |
-
f"resize_method {self.__resize_method} not implemented"
|
138 |
-
)
|
139 |
-
|
140 |
-
if self.__resize_method == "lower_bound":
|
141 |
-
new_height = self.constrain_to_multiple_of(
|
142 |
-
scale_height * height, min_val=self.__height
|
143 |
-
)
|
144 |
-
new_width = self.constrain_to_multiple_of(
|
145 |
-
scale_width * width, min_val=self.__width
|
146 |
-
)
|
147 |
-
elif self.__resize_method == "upper_bound":
|
148 |
-
new_height = self.constrain_to_multiple_of(
|
149 |
-
scale_height * height, max_val=self.__height
|
150 |
-
)
|
151 |
-
new_width = self.constrain_to_multiple_of(
|
152 |
-
scale_width * width, max_val=self.__width
|
153 |
-
)
|
154 |
-
elif self.__resize_method == "minimal":
|
155 |
-
new_height = self.constrain_to_multiple_of(scale_height * height)
|
156 |
-
new_width = self.constrain_to_multiple_of(scale_width * width)
|
157 |
-
else:
|
158 |
-
raise ValueError(f"resize_method {self.__resize_method} not implemented")
|
159 |
-
|
160 |
-
return (new_width, new_height)
|
161 |
-
|
162 |
-
def __call__(self, sample):
|
163 |
-
width, height = self.get_size(
|
164 |
-
sample["image"].shape[1], sample["image"].shape[0]
|
165 |
-
)
|
166 |
-
|
167 |
-
# resize sample
|
168 |
-
sample["image"] = cv2.resize(
|
169 |
-
sample["image"],
|
170 |
-
(width, height),
|
171 |
-
interpolation=self.__image_interpolation_method,
|
172 |
-
)
|
173 |
-
|
174 |
-
if self.__resize_target:
|
175 |
-
if "disparity" in sample:
|
176 |
-
sample["disparity"] = cv2.resize(
|
177 |
-
sample["disparity"],
|
178 |
-
(width, height),
|
179 |
-
interpolation=cv2.INTER_NEAREST,
|
180 |
-
)
|
181 |
-
|
182 |
-
if "depth" in sample:
|
183 |
-
sample["depth"] = cv2.resize(
|
184 |
-
sample["depth"], (width, height), interpolation=cv2.INTER_NEAREST
|
185 |
-
)
|
186 |
-
|
187 |
-
sample["mask"] = cv2.resize(
|
188 |
-
sample["mask"].astype(np.float32),
|
189 |
-
(width, height),
|
190 |
-
interpolation=cv2.INTER_NEAREST,
|
191 |
-
)
|
192 |
-
sample["mask"] = sample["mask"].astype(bool)
|
193 |
-
|
194 |
-
return sample
|
195 |
-
|
196 |
-
|
197 |
-
class NormalizeImage(object):
|
198 |
-
"""Normlize image by given mean and std.
|
199 |
-
"""
|
200 |
-
|
201 |
-
def __init__(self, mean, std):
|
202 |
-
self.__mean = mean
|
203 |
-
self.__std = std
|
204 |
-
|
205 |
-
def __call__(self, sample):
|
206 |
-
sample["image"] = (sample["image"] - self.__mean) / self.__std
|
207 |
-
|
208 |
-
return sample
|
209 |
-
|
210 |
-
|
211 |
-
class PrepareForNet(object):
|
212 |
-
"""Prepare sample for usage as network input.
|
213 |
-
"""
|
214 |
-
|
215 |
-
def __init__(self):
|
216 |
-
pass
|
217 |
-
|
218 |
-
def __call__(self, sample):
|
219 |
-
image = np.transpose(sample["image"], (2, 0, 1))
|
220 |
-
sample["image"] = np.ascontiguousarray(image).astype(np.float32)
|
221 |
-
|
222 |
-
if "mask" in sample:
|
223 |
-
sample["mask"] = sample["mask"].astype(np.float32)
|
224 |
-
sample["mask"] = np.ascontiguousarray(sample["mask"])
|
225 |
-
|
226 |
-
if "disparity" in sample:
|
227 |
-
disparity = sample["disparity"].astype(np.float32)
|
228 |
-
sample["disparity"] = np.ascontiguousarray(disparity)
|
229 |
-
|
230 |
-
if "depth" in sample:
|
231 |
-
depth = sample["depth"].astype(np.float32)
|
232 |
-
sample["depth"] = np.ascontiguousarray(depth)
|
233 |
-
|
234 |
-
return sample
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Ashrafb/codellama-34b/style.css
DELETED
@@ -1,16 +0,0 @@
h1 {
  text-align: center;
}

#duplicate-button {
  margin: auto;
  color: white;
  background: #1565c0;
  border-radius: 100vh;
}

#component-0 {
  max-width: 900px;
  margin: auto;
  padding-top: 1.5rem;
}
spaces/Audio-AGI/WavJourney/VoiceParser/model.py
DELETED
@@ -1,102 +0,0 @@
import os
import json
import numpy as np

import torch
import torchaudio
torchaudio.set_audio_backend("soundfile")  # Use 'soundfile' backend

from encodec import EncodecModel
from encodec.utils import convert_audio
from .hubert_manager import HuBERTManager
from .pre_kmeans_hubert import CustomHubert
from .customtokenizer import CustomTokenizer

class VoiceParser():
    def __init__(self, device='cpu'):
        model = ('quantifier_hubert_base_ls960_14.pth', 'tokenizer.pth')

        hubert_model = CustomHubert(HuBERTManager.make_sure_hubert_installed(), device=device)
        quant_model = CustomTokenizer.load_from_checkpoint(HuBERTManager.make_sure_tokenizer_installed(model=model[0], local_file=model[1]), device)
        encodec_model = EncodecModel.encodec_model_24khz()
        encodec_model.set_target_bandwidth(6.0)

        self.hubert_model = hubert_model
        self.quant_model = quant_model
        self.encodec_model = encodec_model.to(device)
        self.device = device
        print('Loaded VoiceParser models!')

    def extract_acoustic_embed(self, wav_path, npz_dir):
        wav, sr = torchaudio.load(wav_path)

        wav_hubert = wav.to(self.device)

        if wav_hubert.shape[0] == 2:  # Stereo to mono if needed
            wav_hubert = wav_hubert.mean(0, keepdim=True)

        semantic_vectors = self.hubert_model.forward(wav_hubert, input_sample_hz=sr)
        semantic_tokens = self.quant_model.get_token(semantic_vectors)
        wav = convert_audio(wav, sr, self.encodec_model.sample_rate, 1).unsqueeze(0)

        wav = wav.to(self.device)

        with torch.no_grad():
            encoded_frames = self.encodec_model.encode(wav)

        codes = torch.cat([encoded[0] for encoded in encoded_frames], dim=-1).squeeze()

        codes = codes.cpu()
        semantic_tokens = semantic_tokens.cpu()

        wav_name = os.path.split(wav_path)[1]
        npz_name = wav_name[:-4] + '.npz'
        npz_path = os.path.join(npz_dir, npz_name)

        np.savez(
            npz_path,
            semantic_prompt=semantic_tokens,
            fine_prompt=codes,
            coarse_prompt=codes[:2, :]
        )

        return npz_path

    def read_json_file(self, json_path):
        with open(json_path, 'r') as file:
            data = json.load(file)
        return data

    def parse_voice_json(self, voice_json, output_dir):
        """
        Parse a voice json file, generate the corresponding output json and npz files
        Params:
            voice_json: path of a json file or List of json nodes
            output_dir: output dir for new json and npz files
        """
        if isinstance(voice_json, list):
            voice_json = voice_json
        else:
            # If voice_json is a file path (str), read the JSON file
            voice_json = self.read_json_file(voice_json)
        for item in voice_json:
            wav_path = item['wav']
            npz_path = self.extract_acoustic_embed(wav_path=wav_path, npz_dir=output_dir)
            item['npz'] = npz_path
            del item['wav']

        output_json = os.path.join(output_dir, 'metadata.json')

        with open(output_json, 'w') as file:
            json.dump(voice_json, file, indent=4)
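A minimal sketch of how this parser might be driven; the device, wav paths, and output directory below are assumptions for illustration:

```python
parser = VoiceParser(device='cpu')

voice_json = [
    {'id': 'narrator', 'wav': 'voices/narrator.wav'},  # hypothetical voice entries
    {'id': 'guest', 'wav': 'voices/guest.wav'},
]
parser.parse_voice_json(voice_json, output_dir='voice_presets')
# Writes one .npz (semantic/coarse/fine prompts) per wav, plus voice_presets/metadata.json
```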
spaces/Avkash/WhisperUI/whisperui.py
DELETED
@@ -1,216 +0,0 @@
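Before the listing, a minimal sketch of how the class defined in this deleted module appears to be wired up. The import path is hypothetical; the method names come from the code reproduced below:

```python
import gradio as gr
# from whisperui import WhisperModelUI  # hypothetical import path for the module below

ui = gr.Blocks()
app = WhisperModelUI(ui)
app.create_whisper_ui()  # builds the tabs, dropdowns, buttons and click callbacks
app.launch_ui()          # calls gr.Blocks.launch(debug=True)
```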
|
|
1 |
-
import whisper
|
2 |
-
import gradio as gr
|
3 |
-
import os
|
4 |
-
from pytube import YouTube
|
5 |
-
|
6 |
-
|
7 |
-
class WhisperModelUI(object):
|
8 |
-
def __init__(self, ui_obj):
|
9 |
-
self.name = "Whisper Model Processor UI"
|
10 |
-
self.description = "This class is designed to build UI for our Whisper Model"
|
11 |
-
self.ui_obj = ui_obj
|
12 |
-
self.audio_files_list = ['No content']
|
13 |
-
self.whisper_model = whisper.model.Whisper
|
14 |
-
self.video_store_path = 'data_files'
|
15 |
-
|
16 |
-
def load_content(self, file_list):
|
17 |
-
video_out_path = os.path.join(os.getcwd(), self.video_store_path)
|
18 |
-
|
19 |
-
self.audio_files_list = [f for f in os.listdir(video_out_path)
|
20 |
-
if os.path.isfile(video_out_path + "/" + f)
|
21 |
-
and (f.endswith(".mp4") or f.endswith('mp3'))]
|
22 |
-
|
23 |
-
return gr.Dropdown.update(choices=self.audio_files_list)
|
24 |
-
|
25 |
-
def load_whisper_model(self, model_type):
|
26 |
-
try:
|
27 |
-
asr_model = whisper.load_model(model_type.lower())
|
28 |
-
self.whisper_model = asr_model
|
29 |
-
status = "{} Model is loaded successfully".format(model_type)
|
30 |
-
except:
|
31 |
-
status = "error in loading {} model".format(model_type)
|
32 |
-
|
33 |
-
return status, str(self.whisper_model)
|
34 |
-
|
35 |
-
def load_youtube_video(self, video_url):
|
36 |
-
video_out_path = os.path.join(os.getcwd(), self.video_store_path)
|
37 |
-
yt = YouTube(video_url)
|
38 |
-
local_video_path = yt.streams.filter(progressive=True, file_extension='mp4').order_by(
|
39 |
-
'resolution').desc().first().download(video_out_path)
|
40 |
-
return local_video_path
|
41 |
-
|
42 |
-
def get_video_to_text(self,
|
43 |
-
transcribe_or_decode,
|
44 |
-
video_list_dropdown_file_name,
|
45 |
-
language_detect,
|
46 |
-
translate_or_transcribe
|
47 |
-
):
|
48 |
-
debug_text = ""
|
49 |
-
try:
|
50 |
-
video_out_path = os.path.join(os.getcwd(), 'data_files')
|
51 |
-
video_full_path = os.path.join(video_out_path, video_list_dropdown_file_name)
|
52 |
-
if not os.path.isfile(video_full_path):
|
53 |
-
video_text = "Selected video/audio is could not be located.."
|
54 |
-
else:
|
55 |
-
video_text = "Bad choice or result.."
|
56 |
-
if transcribe_or_decode == 'Transcribe':
|
57 |
-
video_text, debug_text = self.run_asr_with_transcribe(video_full_path, language_detect,
|
58 |
-
translate_or_transcribe)
|
59 |
-
elif transcribe_or_decode == 'Decode':
|
60 |
-
audio = whisper.load_audio(video_full_path)
|
61 |
-
video_text, debug_text = self.run_asr_with_decode(audio, language_detect,
|
62 |
-
translate_or_transcribe)
|
63 |
-
except:
|
64 |
-
video_text = "Error processing audio..."
|
65 |
-
return video_text, debug_text
|
66 |
-
|
67 |
-
def run_asr_with_decode(self, audio, language_detect, translate_or_transcribe):
|
68 |
-
debug_info = "None.."
|
69 |
-
|
70 |
-
if 'encoder' not in dir(self.whisper_model) or 'decoder' not in dir(self.whisper_model):
|
71 |
-
return "Model is not loaded, please load the model first", debug_info
|
72 |
-
|
73 |
-
if self.whisper_model.encoder is None or self.whisper_model.decoder is None:
|
74 |
-
return "Model is not loaded, please load the model first", debug_info
|
75 |
-
|
76 |
-
try:
|
77 |
-
# pad/trim it to fit 30 seconds
|
78 |
-
audio = whisper.pad_or_trim(audio)
|
79 |
-
|
80 |
-
# make log-Mel spectrogram and move to the same device as the model
|
81 |
-
mel = whisper.log_mel_spectrogram(audio).to(self.whisper_model.device)
|
82 |
-
|
83 |
-
if language_detect == 'Detect':
|
84 |
-
# detect the spoken language
|
85 |
-
_, probs = self.whisper_model.detect_language(mel)
|
86 |
-
# print(f"Detected language: {max(probs, key=probs.get)}")
|
87 |
-
|
88 |
-
# decode the audio
|
89 |
-
# mps crash if fp16=False is not used
|
90 |
-
|
91 |
-
task_type = 'transcribe'
|
92 |
-
if translate_or_transcribe == 'Translate':
|
93 |
-
task_type = 'translate'
|
94 |
-
|
95 |
-
if language_detect != 'Detect':
|
96 |
-
options = whisper.DecodingOptions(fp16=False,
|
97 |
-
language=language_detect,
|
98 |
-
task=task_type)
|
99 |
-
else:
|
100 |
-
options = whisper.DecodingOptions(fp16=False,
|
101 |
-
task=task_type)
|
102 |
-
|
103 |
-
result = whisper.decode(self.whisper_model, mel, options)
|
104 |
-
result_text = result.text
|
105 |
-
debug_info = str(result)
|
106 |
-
except:
|
107 |
-
result_text = "Error handing audio to text.."
|
108 |
-
return result_text, debug_info
|
109 |
-
|
110 |
-
def run_asr_with_transcribe(self, audio_path, language_detect, translate_or_transcribe):
|
111 |
-
result_text = "Error..."
|
112 |
-
debug_info = "None.."
|
113 |
-
|
114 |
-
if 'encoder' not in dir(self.whisper_model) or 'decoder' not in dir(self.whisper_model):
|
115 |
-
return "Model is not loaded, please load the model first", debug_info
|
116 |
-
|
117 |
-
if self.whisper_model.encoder is None or self.whisper_model.decoder is None:
|
118 |
-
return "Model is not loaded, please load the model first", debug_info
|
119 |
-
|
120 |
-
task_type = 'transcribe'
|
121 |
-
if translate_or_transcribe == 'Translate':
|
122 |
-
task_type = 'translate'
|
123 |
-
|
124 |
-
transcribe_options = dict(beam_size=5, best_of=5,
|
125 |
-
fp16=False,
|
126 |
-
task=task_type,
|
127 |
-
without_timestamps=False)
|
128 |
-
if language_detect != 'Detect':
|
129 |
-
transcribe_options['language'] = language_detect
|
130 |
-
|
131 |
-
transcription = self.whisper_model.transcribe(audio_path, **transcribe_options)
|
132 |
-
if transcription is not None:
|
133 |
-
result_text = transcription['text']
|
134 |
-
debug_info = str(transcription)
|
135 |
-
return result_text, debug_info
|
136 |
-
|
137 |
-
def create_whisper_ui(self):
|
138 |
-
with self.ui_obj:
|
139 |
-
gr.Markdown("Whisper ASR Model UI")
|
140 |
-
with gr.Tabs():
|
141 |
-
with gr.TabItem("YouTube to Text"):
|
142 |
-
with gr.Row():
|
143 |
-
with gr.Column():
|
144 |
-
asr_model_type = gr.Radio(['Tiny', 'Base', 'Small', 'Medium', 'Large'],
|
145 |
-
label="Whisper Model Type",
|
146 |
-
value='Base'
|
147 |
-
)
|
148 |
-
model_status_lbl = gr.Label(label="Model Load Status...")
|
149 |
-
load_model_btn = gr.Button("Load Whisper Model")
|
150 |
-
youtube_url = gr.Textbox(label="YouTube URL",
|
151 |
-
# value="https://www.youtube.com/watch?v=Y2nHd7El8iw"
|
152 |
-
value="https://www.youtube.com/watch?v=PpH_mi923_A"
|
153 |
-
)
|
154 |
-
youtube_video = gr.Video(label="YouTube Video")
|
155 |
-
get_video_btn = gr.Button("Load YouTube URL")
|
156 |
-
with gr.Column():
|
157 |
-
video_list_dropdown = gr.Dropdown(self.audio_files_list, label="Saved Videos")
|
158 |
-
load_video_list_btn = gr.Button("Load All Videos")
|
159 |
-
transcribe_or_decode = gr.Radio(['Transcribe', 'Decode'],
|
160 |
-
label="ASR Options",
|
161 |
-
value='Transcribe'
|
162 |
-
)
|
163 |
-
language_detect = gr.Dropdown(['Detect', 'English', 'Hindi', 'Japanese'],
|
164 |
-
label="Provide Language or detect")
|
165 |
-
translate_or_transcribe = gr.Dropdown(['Transcribe', 'Translate'],
|
166 |
-
label="Set your output task - Translate or Transcribe")
|
167 |
-
get_video_txt_btn = gr.Button("Convert Video to Text")
|
168 |
-
video_text = gr.Textbox(label="Video to Text", lines=10)
|
169 |
-
with gr.TabItem("Debug Info"):
|
170 |
-
with gr.Row():
|
171 |
-
with gr.Column():
|
172 |
-
debug_text = gr.Textbox(label="Debug Details", lines=20)
|
173 |
-
load_model_btn.click(
|
174 |
-
self.load_whisper_model,
|
175 |
-
[
|
176 |
-
asr_model_type
|
177 |
-
],
|
178 |
-
[
|
179 |
-
model_status_lbl,
|
180 |
-
debug_text
|
181 |
-
]
|
182 |
-
)
|
183 |
-
get_video_btn.click(
|
184 |
-
self.load_youtube_video,
|
185 |
-
[
|
186 |
-
youtube_url
|
187 |
-
],
|
188 |
-
[
|
189 |
-
youtube_video
|
190 |
-
]
|
191 |
-
)
|
192 |
-
load_video_list_btn.click(
|
193 |
-
self.load_content,
|
194 |
-
[
|
195 |
-
video_list_dropdown
|
196 |
-
],
|
197 |
-
[
|
198 |
-
video_list_dropdown
|
199 |
-
]
|
200 |
-
)
|
201 |
-
get_video_txt_btn.click(
|
202 |
-
self.get_video_to_text,
|
203 |
-
[
|
204 |
-
transcribe_or_decode,
|
205 |
-
video_list_dropdown,
|
206 |
-
language_detect,
|
207 |
-
translate_or_transcribe
|
208 |
-
],
|
209 |
-
[
|
210 |
-
video_text,
|
211 |
-
debug_text
|
212 |
-
]
|
213 |
-
)
|
214 |
-
|
215 |
-
def launch_ui(self):
|
216 |
-
self.ui_obj.launch(debug=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Banbri/zcvzcv/src/lib/getInitialRenderedScene.ts
DELETED
@@ -1,11 +0,0 @@
import { RenderedScene } from "@/types"

export const getInitialRenderedScene = (): RenderedScene => ({
  renderId: "",
  status: "pending",
  assetUrl: "",
  alt: "",
  error: "",
  maskUrl: "",
  segments: []
})
spaces/Bart92/RVC_HF/julius/lowpass.py
DELETED
@@ -1,181 +0,0 @@
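Before the listing, a minimal sketch of the intended use. The sample rate and cutoff are illustrative; the high-pass-by-subtraction trick is the one suggested in the class docstring below:

```python
import torch
# lowpass_filter is the functional helper defined at the end of the module reproduced below.

x = torch.randn(2, 44_100)            # two 1-second signals sampled at 44.1 kHz
low = lowpass_filter(x, cutoff=0.02)  # keep content below ~0.02 * 44100 ≈ 880 Hz
high = x - low                        # complementary high-pass, as noted in the LowPassFilters docstring
```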
|
|
1 |
-
# File under the MIT license, see https://github.com/adefossez/julius/LICENSE for details.
|
2 |
-
# Author: adefossez, 2020
|
3 |
-
"""
|
4 |
-
FIR windowed sinc lowpass filters.
|
5 |
-
"""
|
6 |
-
|
7 |
-
import math
|
8 |
-
from typing import Sequence, Optional
|
9 |
-
|
10 |
-
import torch
|
11 |
-
from torch.nn import functional as F
|
12 |
-
|
13 |
-
from .core import sinc
|
14 |
-
from .fftconv import fft_conv1d
|
15 |
-
from .utils import simple_repr
|
16 |
-
|
17 |
-
|
18 |
-
class LowPassFilters(torch.nn.Module):
|
19 |
-
"""
|
20 |
-
Bank of low pass filters. Note that a high pass or band pass filter can easily
|
21 |
-
be implemented by substracting a same signal processed with low pass filters with different
|
22 |
-
frequencies (see `julius.bands.SplitBands` for instance).
|
23 |
-
This uses a windowed sinc filter, very similar to the one used in
|
24 |
-
`julius.resample`. However, because we do not change the sample rate here,
|
25 |
-
this filter can be much more efficiently implemented using the FFT convolution from
|
26 |
-
`julius.fftconv`.
|
27 |
-
|
28 |
-
Args:
|
29 |
-
cutoffs (list[float]): list of cutoff frequencies, in [0, 0.5] expressed as `f/f_s` where
|
30 |
-
f_s is the samplerate and `f` is the cutoff frequency.
|
31 |
-
The upper limit is 0.5, because a signal sampled at `f_s` contains only
|
32 |
-
frequencies under `f_s / 2`.
|
33 |
-
stride (int): how much to decimate the output. Keep in mind that decimation
|
34 |
-
of the output is only acceptable if the cutoff frequency is under `1/ (2 * stride)`
|
35 |
-
of the original sampling rate.
|
36 |
-
pad (bool): if True, appropriately pad the input with zero over the edge. If `stride=1`,
|
37 |
-
the output will have the same length as the input.
|
38 |
-
zeros (float): Number of zero crossings to keep.
|
39 |
-
Controls the receptive field of the Finite Impulse Response filter.
|
40 |
-
For lowpass filters with low cutoff frequency, e.g. 40Hz at 44.1kHz,
|
41 |
-
it is a bad idea to set this to a high value.
|
42 |
-
This is likely appropriate for most use. Lower values
|
43 |
-
will result in a faster filter, but with a slower attenuation around the
|
44 |
-
cutoff frequency.
|
45 |
-
fft (bool or None): if True, uses `julius.fftconv` rather than PyTorch convolutions.
|
46 |
-
If False, uses PyTorch convolutions. If None, either one will be chosen automatically
|
47 |
-
depending on the effective filter size.
|
48 |
-
|
49 |
-
|
50 |
-
..warning::
|
51 |
-
All the filters will use the same filter size, aligned on the lowest
|
52 |
-
frequency provided. If you combine a lot of filters with very diverse frequencies, it might
|
53 |
-
be more efficient to split them over multiple modules with similar frequencies.
|
54 |
-
|
55 |
-
..note::
|
56 |
-
A lowpass with a cutoff frequency of 0 is defined as the null function
|
57 |
-
by convention here. This allows for a highpass with a cutoff of 0 to
|
58 |
-
be equal to identity, as defined in `julius.filters.HighPassFilters`.
|
59 |
-
|
60 |
-
Shape:
|
61 |
-
|
62 |
-
- Input: `[*, T]`
|
63 |
-
- Output: `[F, *, T']`, with `T'=T` if `pad` is True and `stride` is 1, and
|
64 |
-
`F` is the numer of cutoff frequencies.
|
65 |
-
|
66 |
-
>>> lowpass = LowPassFilters([1/4])
|
67 |
-
>>> x = torch.randn(4, 12, 21, 1024)
|
68 |
-
>>> list(lowpass(x).shape)
|
69 |
-
[1, 4, 12, 21, 1024]
|
70 |
-
"""
|
71 |
-
|
72 |
-
def __init__(self, cutoffs: Sequence[float], stride: int = 1, pad: bool = True,
|
73 |
-
zeros: float = 8, fft: Optional[bool] = None):
|
74 |
-
super().__init__()
|
75 |
-
self.cutoffs = list(cutoffs)
|
76 |
-
if min(self.cutoffs) < 0:
|
77 |
-
raise ValueError("Minimum cutoff must be larger than zero.")
|
78 |
-
if max(self.cutoffs) > 0.5:
|
79 |
-
raise ValueError("A cutoff above 0.5 does not make sense.")
|
80 |
-
self.stride = stride
|
81 |
-
self.pad = pad
|
82 |
-
self.zeros = zeros
|
83 |
-
self.half_size = int(zeros / min([c for c in self.cutoffs if c > 0]) / 2)
|
84 |
-
if fft is None:
|
85 |
-
fft = self.half_size > 32
|
86 |
-
self.fft = fft
|
87 |
-
window = torch.hann_window(2 * self.half_size + 1, periodic=False)
|
88 |
-
time = torch.arange(-self.half_size, self.half_size + 1)
|
89 |
-
filters = []
|
90 |
-
for cutoff in cutoffs:
|
91 |
-
if cutoff == 0:
|
92 |
-
filter_ = torch.zeros_like(time)
|
93 |
-
else:
|
94 |
-
filter_ = 2 * cutoff * window * sinc(2 * cutoff * math.pi * time)
|
95 |
-
# Normalize filter to have sum = 1, otherwise we will have a small leakage
|
96 |
-
# of the constant component in the input signal.
|
97 |
-
filter_ /= filter_.sum()
|
98 |
-
filters.append(filter_)
|
99 |
-
self.register_buffer("filters", torch.stack(filters)[:, None])
|
100 |
-
|
101 |
-
def forward(self, input):
|
102 |
-
shape = list(input.shape)
|
103 |
-
input = input.view(-1, 1, shape[-1])
|
104 |
-
if self.pad:
|
105 |
-
input = F.pad(input, (self.half_size, self.half_size), mode='replicate')
|
106 |
-
if self.fft:
|
107 |
-
out = fft_conv1d(input, self.filters, stride=self.stride)
|
108 |
-
else:
|
109 |
-
out = F.conv1d(input, self.filters, stride=self.stride)
|
110 |
-
shape.insert(0, len(self.cutoffs))
|
111 |
-
shape[-1] = out.shape[-1]
|
112 |
-
return out.permute(1, 0, 2).reshape(shape)
|
113 |
-
|
114 |
-
def __repr__(self):
|
115 |
-
return simple_repr(self)
|
116 |
-
|
117 |
-
|
118 |
-
class LowPassFilter(torch.nn.Module):
|
119 |
-
"""
|
120 |
-
Same as `LowPassFilters` but applies a single low pass filter.
|
121 |
-
|
122 |
-
Shape:
|
123 |
-
|
124 |
-
- Input: `[*, T]`
|
125 |
-
- Output: `[*, T']`, with `T'=T` if `pad` is True and `stride` is 1.
|
126 |
-
|
127 |
-
>>> lowpass = LowPassFilter(1/4, stride=2)
|
128 |
-
>>> x = torch.randn(4, 124)
|
129 |
-
>>> list(lowpass(x).shape)
|
130 |
-
[4, 62]
|
131 |
-
"""
|
132 |
-
|
133 |
-
def __init__(self, cutoff: float, stride: int = 1, pad: bool = True,
|
134 |
-
zeros: float = 8, fft: Optional[bool] = None):
|
135 |
-
super().__init__()
|
136 |
-
self._lowpasses = LowPassFilters([cutoff], stride, pad, zeros, fft)
|
137 |
-
|
138 |
-
@property
|
139 |
-
def cutoff(self):
|
140 |
-
return self._lowpasses.cutoffs[0]
|
141 |
-
|
142 |
-
@property
|
143 |
-
def stride(self):
|
144 |
-
return self._lowpasses.stride
|
145 |
-
|
146 |
-
@property
|
147 |
-
def pad(self):
|
148 |
-
return self._lowpasses.pad
|
149 |
-
|
150 |
-
@property
|
151 |
-
def zeros(self):
|
152 |
-
return self._lowpasses.zeros
|
153 |
-
|
154 |
-
@property
|
155 |
-
def fft(self):
|
156 |
-
return self._lowpasses.fft
|
157 |
-
|
158 |
-
def forward(self, input):
|
159 |
-
return self._lowpasses(input)[0]
|
160 |
-
|
161 |
-
def __repr__(self):
|
162 |
-
return simple_repr(self)
|
163 |
-
|
164 |
-
|
165 |
-
def lowpass_filters(input: torch.Tensor, cutoffs: Sequence[float],
|
166 |
-
stride: int = 1, pad: bool = True,
|
167 |
-
zeros: float = 8, fft: Optional[bool] = None):
|
168 |
-
"""
|
169 |
-
Functional version of `LowPassFilters`, refer to this class for more information.
|
170 |
-
"""
|
171 |
-
return LowPassFilters(cutoffs, stride, pad, zeros, fft).to(input)(input)
|
172 |
-
|
173 |
-
|
174 |
-
def lowpass_filter(input: torch.Tensor, cutoff: float,
|
175 |
-
stride: int = 1, pad: bool = True,
|
176 |
-
zeros: float = 8, fft: Optional[bool] = None):
|
177 |
-
"""
|
178 |
-
Same as `lowpass_filters` but with a single cutoff frequency.
|
179 |
-
Output will not have a dimension inserted in the front.
|
180 |
-
"""
|
181 |
-
return lowpass_filters(input, [cutoff], stride, pad, zeros, fft)[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Benson/text-generation/Examples/Crimen De Gngster Real Versin Antigua Apkpure.md
DELETED
@@ -1,139 +0,0 @@

<h1>Real Gangster Crime Old Version APKPure: A Review</h1>
<p>If you are a fan of open-world action and adventure games, you may have heard of Real Gangster Crime. This is a sandbox game that lets you explore a city full of crime, violence, and chaos. You can drive cars, shoot guns, fight enemies, and complete missions as you become the most notorious gangster in the city. But did you know that you can also download and play the old version of Real Gangster Crime using APKPure? In this article, we review what Real Gangster Crime is, why you might want to download its old version from APKPure, and how to do it.</p>
<h2>What is Real Gangster Crime?</h2>
<p>Real Gangster Crime is a game developed by Naxeex Studio and released in 2016. It is available for Android devices and can be downloaded from the Google Play Store or other sources. The game is rated 4.1 out of 5 stars by more than 1 million users on the Google Play Store.</p>
<h2>real gangster crime old version apkpure</h2><br /><p><b><b>Download File</b> ⭐ <a href="https://bltlly.com/2v6ITx">https://bltlly.com/2v6ITx</a></b></p><br /><br />
<h3>A sandbox game with open-world action and adventure</h3>
<p>Real Gangster Crime is a sandbox game, which means you can roam freely through the game world and interact with various elements. The game world is a fictional city called New Vegas, inspired by Las Vegas. The city is full of skyscrapers, casinos, hotels, clubs, and other attractions. However, it is also plagued by crime, corruption, gangs, and police. You can choose to follow the story mode or create your own adventures in the city.</p>
<h3>The main features and gameplay of Real Gangster Crime</h3>
<p>The main features and gameplay of Real Gangster Crime include:</p>
<ul>
<li>Driving cars: You can drive various vehicles in the game, such as sports cars, motorcycles, trucks, tanks, helicopters, and even UFOs. You can also customize your vehicles with different colors, stickers, weapons, and upgrades.</li>

<li>Fighting enemies: You can fight different enemies in the game, such as rival gangsters, police officers, soldiers, zombies, aliens, and robots. You can also join or create your own gang and recruit members to help you in your missions.</li>
<li>Completing missions: You can complete various missions in the game, such as robbing banks, stealing cars, assassinating targets, escaping from prison, destroying buildings, and more. You can also earn money and reputation by completing missions.</li>
</ul>
<h2>Why download the old version of Real Gangster Crime from APKPure?</h2>
<p>If you are wondering why you might want to download the old version of Real Gangster Crime from APKPure instead of the latest version from the Google Play Store or other sources, here are some possible reasons:</p>
<h3>The benefits of using APKPure to download old versions of apps</h3>
<p>APKPure is a website that provides free and safe downloads of Android apps in APK format. APK stands for Android Package Kit, a file format that contains all the components of an app. By downloading apps in APK format from APKPure, you can enjoy benefits such as:</p>
<ul>
<li>Bypassing regional restrictions: Some apps may not be available in your country or region for various reasons. By downloading apps from APKPure, you can access apps that are not available in your region or use features that are restricted in your area.</li>
<li>Getting older versions of apps: Some apps update their features or design over time, which may not suit your preferences or your device's compatibility. By downloading apps from APKPure, you can choose the version you like best or that works best for your device.</li>
<li>Saving storage space: Some apps grow in size or require additional data after updating, which can take up more storage space on your device. By downloading apps from APKPure, you can get the version that has a smaller size or needs less data.</li>
</ul>

<p>The old version of Real Gangster Crime that can be downloaded from APKPure is version 4.6b, released on March 19, 2020. The latest version of Real Gangster Crime that can be downloaded from the Google Play Store or other sources is version 5.5, released on May 12, 2021. The differences between the two versions include:</p>
<table>
<tr>
<th>Old version</th>
<th>New version</th>
</tr>
<tr>
<td>No dragon head feature</td>
<td>Dragon head feature added</td>
</tr>
<tr>
<td>No battlefield feature</td>
<td>Battlefield feature added</td>
</tr>
<tr>
<td>No UFO feature</td>
<td>UFO feature added</td>
</tr>
<tr>
<td>No helicopter feature</td>
<td>Helicopter feature added</td>
</tr>
<tr>
<td>No flamethrower feature</td>
<td>Flamethrower feature added</td>
</tr>
<tr>
<td>No laser rifle feature</td>
<td>Laser rifle feature added</td>
</tr>
<tr>
<td>No steel suit feature</td>
<td>Steel suit feature added</td>
</tr>
<tr>
<td>No car racing feature</td>
<td>Car racing feature added</td>
</tr>
<tr>
<td>No ATM hacking feature</td>
<td>ATM hacking feature added</td>
</tr>
<tr>
<td>No Easter egg feature</td>
<td>Easter egg feature added</td>
</tr>
<tr><td colspan="2">Source: </td></tr>
</table>
<p>You might prefer the old version of Real Gangster Crime if you like the simpler, classic gameplay, or if you have a low-end device that cannot handle the new features. You might prefer the new version of Real Gangster Crime if you like the more varied, modern gameplay, or if you have a high-end device that can support the new features.</p>
<h2>How to download and install the old version of Real Gangster Crime from APKPure?</h2>
<p>If you want to download and install the old version of Real Gangster Crime from APKPure, follow these steps:</p>
<h3>The steps to download and install the old version of Real Gangster Crime from APKPure</h3>
<ol>
<li>Go to the APKPure website and search for Real Gangster Crime.</li>

<li>Click the "Download APK (99.8 MB)" button under the "Old Versions" section.</li>
<li>Wait for the download to finish and locate the file on your device.</li>
<li>Tap the file and allow installation from unknown sources if prompted.</li>
<li>Follow the on-screen instructions and wait for the installation to complete.</li>
<li>Launch the app and enjoy playing the old version of Real Gangster Crime from APKPure.</li>
</ol>
<h3>Tips and tricks for enjoying the old version of Real Gangster Crime from APKPure</h3>
<p>To enjoy playing the old version of Real Gangster Crime from APKPure, you can use some tips and tricks such as:</p>
<p></p>
<ul>
<li>Use the map to find missions, shops, vehicles, and enemies.</li>
<li>Use the shop to buy weapons, clothes, and upgrades for your character and vehicles.</li>
<li>Use the garage to store and customize your vehicles.</li>
<li>Use the phone to call your gang members or other contacts for help or information.</li>
<li>Use the settings to adjust the game's graphics, sound, controls, and language.</li>
<li>Use the pause menu to save, load, or quit the game.</li>
</ul>
<h2>Conclusion</h2>

<p>We hope this article has helped you learn more about the old version of Real Gangster Crime on APKPure and how to download and play it. If you have any questions or comments, feel free to leave a comment below. Thanks for reading, and have fun playing the old version of Real Gangster Crime from APKPure!</p>
<h2>Frequently asked questions</h2>
<p>Here are some frequently asked questions about the old version of Real Gangster Crime on APKPure:</p>
<ol>
<li>Is the old version of Real Gangster Crime from APKPure safe to download and install?</li>
<p>Yes, the old version of Real Gangster Crime from APKPure is safe to download and install from the APKPure website. APKPure is a trusted source that provides free and safe downloads of Android apps in APK format. However, you should always be careful when downloading apps from unknown sources and scan them for viruses or malware before installing them on your device.</p>
<li>What are the system requirements for the old version of Real Gangster Crime from APKPure?</li>
<p>The system requirements for the old version of Real Gangster Crime from APKPure are:</p>
<ul>
<li>Android 4.1 or higher</li>
<li>At least 100 MB of free storage space</li>
<li>At least 1 GB of RAM</li>
<li>A stable Internet connection</li>
</ul>
<li>How can I update the old version of Real Gangster Crime from APKPure to the latest version?</li>
<p>If you want to update the old version of Real Gangster Crime from APKPure to the latest version, you can do so by following these steps:</p>
<ul>
<li>Go to the Google Play Store and search for Real Gangster Crime.</li>
<li>Select the app and tap the "Update" button.</li>
<li>Wait for the update to finish and launch the app.</li>
</ul>
<li>How can I uninstall the old version of Real Gangster Crime from APKPure from my device?</li>
<p>If you want to uninstall the old version of Real Gangster Crime from APKPure from your device, you can do so by following these steps:</p>
<ul>
<li>Go to your device settings and tap "Apps" or "Applications".</li>
<li>Find and select Real Gangster Crime in the list of apps.</li>

</ul>
<li>Where can I find more information about the old version of Real Gangster Crime on APKPure?</li>
<p>If you want to find more information about the old version of Real Gangster Crime on APKPure, you can visit these sources:</p>
<ul>
<li>The official website of Naxeex Studio: </li>
<li>The official Facebook page of Naxeex Studio: </li>
<li>The official YouTube channel of Naxeex Studio: </li>
</ul>
</ol>
: https://apkpure.com/real-gangster-crime/com.gta.real.gangster.crime. : https://play.google.com/stores/apps/apps/detail.=id=com.gta.real.gangster.crime&hl=en_US https:/www.youtube./watchv/watch.c=w7Q0mZg9M :tps:na.///s.105468371092348 : https://www.youtube.com/channel/UCoUZGTc5JfwzN8HfyNZwYzg</p> 64aa2da5cf<br />
<br />
<br />
spaces/Benson/text-generation/Examples/Descargar Gratis Metro Surfistas Juego Para Windows 7 Softonic.md
DELETED
@@ -1,60 +0,0 @@

<h1>Subway Surfers Free Game Download for Windows 7 Softonic</h1>
<p>If you are looking for a fun, addictive, and colorful endless running game that will keep you entertained for hours, you should try Subway Surfers. This game is one of the most popular and most downloaded games in the world, with more than a billion players. In this game, you join Jake, Tricky, Fresh, and other cool characters as they run from the grumpy inspector and his dog along the subway tracks. You have to swipe left, right, up, and down to dodge trains, buses, barriers, tunnels, and other obstacles while collecting coins, power-ups, keys, and other items. You also have to complete missions, challenges, and events to earn rewards and unlock new characters, boards, outfits, locations, and more. Subway Surfers is a game that never gets boring or repetitive because it always has something new and exciting to offer.</p>
<h2>free download subway surfers game for windows 7 softonic</h2><br /><p><b><b>Download Zip</b> ✔✔✔ <a href="https://bltlly.com/2v6LfL">https://bltlly.com/2v6LfL</a></b></p><br /><br />
<p>But how can you play this amazing game on your Windows 7 computer? You can easily download it from Softonic, one of the most trusted and reliable sources of software and apps. Softonic is a website that offers free downloads of various games, programs, tools, utilities, and more for different platforms. You can find Subway Surfers on Softonic along with other similar games such as Temple Run, Sonic Dash, Minion Rush, etc. In this article, we show you how to download Subway Surfers for Windows 7 from Softonic in a few simple steps. We also give you some tips and tricks on how to play Subway Surfers on Windows 7 Softonic like a pro. So let's get started!</p>
<h2>How to download Subway Surfers for Windows 7 Softonic</h2>
<p>Downloading Subway Surfers for Windows 7 from Softonic is quick and easy. All you need is a stable Internet connection and some free space on your hard drive. Here are the steps to follow:</p>
<ol>
<li>Visit the <a href="( 1 )">Softonic website</a> ( 1 ) and search for Subway Surfers in the search bar.</li>

<li>Run the installer (.exe file) you downloaded and follow the instructions to complete the installation. You may need to accept the terms and conditions and choose some options such as language, destination folder, shortcuts, etc.</li>
</ol>
<p>Congratulations! You have successfully downloaded Subway Surfers for Windows 7 from Softonic. Now you can enjoy this awesome game on your PC whenever you want.</p>
<h2>How to play Subway Surfers on Windows 7 Softonic</h2>
<p>Playing Subway Surfers on Windows 7 Softonic is easy and fun. The game has a simple, intuitive interface that lets you control your character with just a few gestures. Here are the steps to follow:</p>
<ol>
<li>Launch the game from the desktop shortcut or the Start menu. You will see the main menu with options such as play, settings, shop, team, etc.</li>
<li>Choose your character and board from the ones you have unlocked or purchased. You can also customize your character's outfit and accessories.</li>
<li>Start running on the subway tracks by clicking the play button. Your character runs automatically, and you have to swipe left, right, up, and down to dodge obstacles and collect coins and power-ups. You can also use the arrow keys or the mouse to move your character.</li>
<li>Complete missions, challenges, and events to earn rewards and unlock new items. You can check your progress and goals by clicking the pause button or the mission icon in the upper-left corner of the screen.</li>
</ol>
<p>That's it! You are now ready to play Subway Surfers on Windows 7 Softonic like a pro. Have fun and enjoy the game!</p>
<p></p>
<h2>Tips and tricks for Subway Surfers on Windows 7 Softonic</h2>
<p>If you want to improve your skills and performance in Subway Surfers on Windows 7 Softonic, you can follow some tips and tricks that will help you get higher scores, more coins, and more fun. Here are some of them:</p>
<ul>

<li>Upgrade your items and abilities so they last longer and are more effective. You can upgrade your power-ups, boards, characters, and abilities by spending coins or keys in the shop. Upgrading increases the duration, speed, strength, or frequency of your items and abilities.</li>
<li>Collect keys and use them to revive yourself when you crash or get caught by the inspector. Keys are rare and valuable items that can save your run and let you keep going. You can collect keys by finding them on the tracks, completing missions, watching ads, or buying them with real money.</li>
<li>Join a team or create your own and compete with other players around the world. A team is a group of players who share a common name, logo, and goal. You can join a team or create your own by clicking the team button in the main menu. By being part of a team, you can take part in weekly competitions, earn rewards, chat with other members, and show off your skills.</li>
</ul>
<p>These are some of the tips and tricks that will help you play Subway Surfers on Windows 7 Softonic better. Of course, there are many more things you can discover and learn by playing the game yourself, so do not hesitate to try new things and experiment with different strategies.</p>
<h2>Pros and cons of Subway Surfers on Windows 7 Softonic</h2>
<p>Subway Surfers on Windows 7 Softonic is a great game with many advantages and benefits for its players. However, it also has some drawbacks and limitations you should be aware of before downloading it. Here are some of the pros and cons of Subway Surfers on Windows 7 Softonic:</p>
<table>
<tr><th>Pros</th><th>Cons</th></tr>
<tr><td>Fun, addictive, and colorful endless game with smooth graphics and controls</td><td>May contain ads and in-app purchases that can be annoying or tempting</td></tr>

<tr><td>Suitable for all ages and skill levels, with various modes and options</td><td>May become repetitive or boring after a while if you do not try new things or challenges</td></tr>
</table>
<p>As you can see, Subway Surfers on Windows 7 Softonic has its pros and cons, but overall it is a game worth trying and playing. You will have plenty of fun and excitement on the subway tracks escaping from the inspector. You will also enjoy the colorful graphics, the smooth controls, the variety of characters, boards, locations, and power-ups, and the regular updates and new features. Subway Surfers on Windows 7 Softonic is a game that will keep you happy and entertained for hours.</p>
<h2>Conclusion</h2>
<p>In conclusion, Subway Surfers is one of the best and most popular endless running games in the world. It is a game you can download and play for free on your Windows 7 computer from Softonic, one of the most trusted and reliable sources of software and apps. In this article, we showed you how to download Subway Surfers for Windows 7 from Softonic in a few simple steps. We also gave you some tips and tricks on how to play Subway Surfers on Windows 7 Softonic like a pro, and we discussed the pros and cons of Subway Surfers on Windows 7 Softonic and why you should give it a try. We hope you found this article helpful and informative. If you did, please share it with friends and family who might be interested in playing Subway Surfers on Windows 7 Softonic. And if you have any questions or comments, please leave them in the comments section below. We would love to hear from you.</p>
<p>Now that you know all about Subway Surfers on Windows 7 Softonic, what are you waiting for? Go ahead and download it from Softonic today and start running on the subway tracks with Jake, Tricky, Fresh, and other cool characters. Have fun and enjoy the game!</p>
<h3>Frequently asked questions</h3>

<ol>
<li>Is Subway Surfers safe to download from Softonic?</li>
<p>Yes, Subway Surfers is safe to download from Softonic. Softonic is a website that offers free downloads of various games, programs, tools, utilities, and more for different platforms. It has a strict quality control system that ensures all downloads are free of viruses and malware. You can trust Softonic to provide safe and reliable downloads of Subway Surfers and other similar games.</p>
<li>How much space does Subway Surfers take up on Windows 7?</li>
<p>Subway Surfers takes up about 200 MB of space on Windows 7. However, this may vary depending on updates and new features added to the game. You may need to clear some space on your hard drive before downloading or installing Subway Surfers on Windows 7.</p>
<li>Can I play Subway Surfers offline on Windows 7?</li>
<p>Yes, you can play Subway Surfers offline on Windows 7. You do not need an Internet connection to run or play the game. However, you may need an Internet connection to download or update the game, to access some features such as the shop or the team, or to sync your progress with your Facebook account.</p>
<li>Can I play Subway Surfers with a keyboard or a mouse on Windows 7?</li>
<p>Yes, you can play Subway Surfers with a keyboard or a mouse on Windows 7. The game has a simple, intuitive interface that lets you control your character with just a few gestures. You can use the arrow keys or the mouse to move your character left, right, up, and down, and you can use the space bar or the left mouse button to activate power-ups.</p>
<li>Can I transfer my progress from my mobile device to my Windows 7 computer?</li>

</ol></p> 64aa2da5cf<br />
<br />
<br />
spaces/BetterAPI/BetterChat/src/routes/conversation/[id]/+server.ts
DELETED
@@ -1,236 +0,0 @@

import { PUBLIC_SEP_TOKEN } from "$env/static/public";
import { buildPrompt } from "$lib/buildPrompt.js";
import { abortedGenerations } from "$lib/server/abortedGenerations.js";
import { collections } from "$lib/server/database.js";
import { modelEndpoint } from "$lib/server/modelEndpoint.js";
import type { Message } from "$lib/types/Message.js";
import { concatUint8Arrays } from "$lib/utils/concatUint8Arrays.js";
import { streamToAsyncIterable } from "$lib/utils/streamToAsyncIterable";
import { trimPrefix } from "$lib/utils/trimPrefix.js";
import { trimSuffix } from "$lib/utils/trimSuffix.js";
import type { TextGenerationStreamOutput } from "@huggingface/inference";
import { error } from "@sveltejs/kit";
import { ObjectId } from "mongodb";
import { z } from "zod";

export async function POST({ request, fetch, locals, params }) {
	// todo: add validation on params.id
	const convId = new ObjectId(params.id);
	const date = new Date();

	const conv = await collections.conversations.findOne({
		_id: convId,
		sessionId: locals.sessionId,
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	const json = await request.json();
	const {
		inputs: newPrompt,
		options: { id: messageId, is_retry },
	} = z
		.object({
			inputs: z.string().trim().min(1),
			options: z.object({
				id: z.optional(z.string().uuid()),
				is_retry: z.optional(z.boolean()),
			}),
		})
		.parse(json);

	const messages = (() => {
		if (is_retry && messageId) {
			let retryMessageIdx = conv.messages.findIndex((message) => message.id === messageId);
			if (retryMessageIdx === -1) {
				retryMessageIdx = conv.messages.length;
			}
			return [
				...conv.messages.slice(0, retryMessageIdx),
				{ content: newPrompt, from: "user", id: messageId as Message["id"] },
			];
		}
		return [
			...conv.messages,
			{ content: newPrompt, from: "user", id: (messageId as Message["id"]) || crypto.randomUUID() },
		];
	})() satisfies Message[];

	// Todo: on-the-fly migration, remove later
	for (const message of messages) {
		if (!message.id) {
			message.id = crypto.randomUUID();
		}
	}
	const prompt = buildPrompt(messages);

	const randomEndpoint = modelEndpoint();

	const abortController = new AbortController();

	const resp = await fetch(randomEndpoint.endpoint, {
		headers: {
			"Content-Type": request.headers.get("Content-Type") ?? "application/json",
			Authorization: randomEndpoint.authorization,
		},
		method: "POST",
		body: JSON.stringify({
			...json,
			inputs: prompt,
		}),
		signal: abortController.signal,
	});

	const [stream1, stream2] = resp.body!.tee();

	async function saveMessage() {
		let generated_text = await parseGeneratedText(stream2, convId, date, abortController);

		// We could also check if PUBLIC_ASSISTANT_MESSAGE_TOKEN is present and use it to slice the text
		if (generated_text.startsWith(prompt)) {
			generated_text = generated_text.slice(prompt.length);
		}

		generated_text = trimSuffix(trimPrefix(generated_text, "<|startoftext|>"), PUBLIC_SEP_TOKEN);

		messages.push({ from: "assistant", content: generated_text, id: crypto.randomUUID() });

		await collections.conversations.updateOne(
			{
				_id: convId,
			},
			{
				$set: {
					messages,
					updatedAt: new Date(),
				},
			}
		);
	}

	saveMessage().catch(console.error);

	// Todo: maybe we should wait for the message to be saved before ending the response - in case of errors
	return new Response(stream1, {
		headers: Object.fromEntries(resp.headers.entries()),
		status: resp.status,
		statusText: resp.statusText,
	});
}

export async function DELETE({ locals, params }) {
	const convId = new ObjectId(params.id);

	const conv = await collections.conversations.findOne({
		_id: convId,
		sessionId: locals.sessionId,
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	await collections.conversations.deleteOne({ _id: conv._id });

	return new Response();
}

async function parseGeneratedText(
	stream: ReadableStream,
	conversationId: ObjectId,
	promptedAt: Date,
	abortController: AbortController
): Promise<string> {
	const inputs: Uint8Array[] = [];
	for await (const input of streamToAsyncIterable(stream)) {
		inputs.push(input);

		const date = abortedGenerations.get(conversationId.toString());

		if (date && date > promptedAt) {
			abortController.abort("Cancelled by user");
			const completeInput = concatUint8Arrays(inputs);

			const lines = new TextDecoder()
				.decode(completeInput)
				.split("\n")
				.filter((line) => line.startsWith("data:"));

			const tokens = lines.map((line) => {
				try {
					const json: TextGenerationStreamOutput = JSON.parse(line.slice("data:".length));
					return json.token.text;
				} catch {
					return "";
				}
			});
			return tokens.join("");
		}
	}

	// Merge inputs into a single Uint8Array
	const completeInput = concatUint8Arrays(inputs);

	// Get last line starting with "data:" and parse it as JSON to get the generated text
	const message = new TextDecoder().decode(completeInput);

	let lastIndex = message.lastIndexOf("\ndata:");
	if (lastIndex === -1) {
		lastIndex = message.indexOf("data");
	}

	if (lastIndex === -1) {
		console.error("Could not parse in last message");
	}

	let lastMessage = message.slice(lastIndex).trim().slice("data:".length);
	if (lastMessage.includes("\n")) {
		lastMessage = lastMessage.slice(0, lastMessage.indexOf("\n"));
	}

	const lastMessageJSON = JSON.parse(lastMessage);

	if (lastMessageJSON.error) {
		throw new Error(lastMessageJSON.error);
	}

	const res = lastMessageJSON.generated_text;

	if (typeof res !== "string") {
		throw new Error("Could not parse generated text");
	}

	return res;
}

export async function PATCH({ request, locals, params }) {
	const { title } = z
		.object({ title: z.string().trim().min(1).max(100) })
		.parse(await request.json());

	const convId = new ObjectId(params.id);

	const conv = await collections.conversations.findOne({
		_id: convId,
		sessionId: locals.sessionId,
	});

	if (!conv) {
		throw error(404, "Conversation not found");
	}

	await collections.conversations.updateOne(
		{
			_id: convId,
		},
		{
			$set: {
				title,
			},
		}
	);

	return new Response();
}
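For orientation, here is a hedged client-side sketch of how the POST route in the deleted file above could be exercised; it is not part of the repository. It assumes the SvelteKit app is running locally, that a conversation document already exists for the caller's session cookie (the cookie name and values below are placeholders), and that the proxied model endpoint emits text-generation "data:" events carrying a token.text field, which is what parseGeneratedText expects. Python with the requests package is used only for illustration.

    import json

    import requests  # third-party HTTP client, used only for this sketch

    BASE_URL = "http://localhost:5173"      # assumption: local dev server
    CONV_ID = "0123456789abcdef01234567"    # assumption: an existing conversation ObjectId
    COOKIES = {"session": "..."}            # assumption: the app's session cookie must be forwarded

    # POST /conversation/{id} proxies the model's event stream back to the caller.
    resp = requests.post(
        f"{BASE_URL}/conversation/{CONV_ID}",
        json={"inputs": "Hello!", "options": {}},
        cookies=COOKIES,
        stream=True,
    )
    resp.raise_for_status()

    # Each interesting line is a server-sent-events chunk of the form "data:{json}"
    # whose payload carries a token.text field.
    for raw in resp.iter_lines():
        line = raw.decode("utf-8", errors="ignore")
        if not line.startswith("data:"):
            continue
        try:
            payload = json.loads(line[len("data:"):])
        except json.JSONDecodeError:
            continue
        print(payload.get("token", {}).get("text", ""), end="", flush=True)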
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/chardet/langturkishmodel.py
DELETED
@@ -1,4380 +0,0 @@
|
|
1 |
-
from pip._vendor.chardet.sbcharsetprober import SingleByteCharSetModel
|
2 |
-
|
3 |
-
# 3: Positive
|
4 |
-
# 2: Likely
|
5 |
-
# 1: Unlikely
|
6 |
-
# 0: Negative
|
7 |
-
|
8 |
-
TURKISH_LANG_MODEL = {
|
9 |
-
23: { # 'A'
|
10 |
-
23: 0, # 'A'
|
11 |
-
37: 0, # 'B'
|
12 |
-
47: 0, # 'C'
|
13 |
-
39: 0, # 'D'
|
14 |
-
29: 0, # 'E'
|
15 |
-
52: 0, # 'F'
|
16 |
-
36: 0, # 'G'
|
17 |
-
45: 0, # 'H'
|
18 |
-
53: 0, # 'I'
|
19 |
-
60: 0, # 'J'
|
20 |
-
16: 0, # 'K'
|
21 |
-
49: 0, # 'L'
|
22 |
-
20: 0, # 'M'
|
23 |
-
46: 0, # 'N'
|
24 |
-
42: 0, # 'O'
|
25 |
-
48: 0, # 'P'
|
26 |
-
44: 0, # 'R'
|
27 |
-
35: 0, # 'S'
|
28 |
-
31: 0, # 'T'
|
29 |
-
51: 0, # 'U'
|
30 |
-
38: 0, # 'V'
|
31 |
-
62: 0, # 'W'
|
32 |
-
43: 0, # 'Y'
|
33 |
-
56: 0, # 'Z'
|
34 |
-
1: 3, # 'a'
|
35 |
-
21: 0, # 'b'
|
36 |
-
28: 0, # 'c'
|
37 |
-
12: 2, # 'd'
|
38 |
-
2: 3, # 'e'
|
39 |
-
18: 0, # 'f'
|
40 |
-
27: 1, # 'g'
|
41 |
-
25: 1, # 'h'
|
42 |
-
3: 1, # 'i'
|
43 |
-
24: 0, # 'j'
|
44 |
-
10: 2, # 'k'
|
45 |
-
5: 1, # 'l'
|
46 |
-
13: 1, # 'm'
|
47 |
-
4: 1, # 'n'
|
48 |
-
15: 0, # 'o'
|
49 |
-
26: 0, # 'p'
|
50 |
-
7: 1, # 'r'
|
51 |
-
8: 1, # 's'
|
52 |
-
9: 1, # 't'
|
53 |
-
14: 1, # 'u'
|
54 |
-
32: 0, # 'v'
|
55 |
-
57: 0, # 'w'
|
56 |
-
58: 0, # 'x'
|
57 |
-
11: 3, # 'y'
|
58 |
-
22: 0, # 'z'
|
59 |
-
63: 0, # '·'
|
60 |
-
54: 0, # 'Ç'
|
61 |
-
50: 0, # 'Ö'
|
62 |
-
55: 0, # 'Ü'
|
63 |
-
59: 0, # 'â'
|
64 |
-
33: 1, # 'ç'
|
65 |
-
61: 0, # 'î'
|
66 |
-
34: 0, # 'ö'
|
67 |
-
17: 0, # 'ü'
|
68 |
-
30: 0, # 'ğ'
|
69 |
-
41: 0, # 'İ'
|
70 |
-
6: 0, # 'ı'
|
71 |
-
40: 0, # 'Ş'
|
72 |
-
19: 0, # 'ş'
|
73 |
-
},
|
74 |
-
37: { # 'B'
|
75 |
-
23: 0, # 'A'
|
76 |
-
37: 0, # 'B'
|
77 |
-
47: 2, # 'C'
|
78 |
-
39: 0, # 'D'
|
79 |
-
29: 0, # 'E'
|
80 |
-
52: 2, # 'F'
|
81 |
-
36: 0, # 'G'
|
82 |
-
45: 0, # 'H'
|
83 |
-
53: 0, # 'I'
|
84 |
-
60: 0, # 'J'
|
85 |
-
16: 1, # 'K'
|
86 |
-
49: 0, # 'L'
|
87 |
-
20: 0, # 'M'
|
88 |
-
46: 0, # 'N'
|
89 |
-
42: 0, # 'O'
|
90 |
-
48: 1, # 'P'
|
91 |
-
44: 0, # 'R'
|
92 |
-
35: 1, # 'S'
|
93 |
-
31: 0, # 'T'
|
94 |
-
51: 0, # 'U'
|
95 |
-
38: 1, # 'V'
|
96 |
-
62: 0, # 'W'
|
97 |
-
43: 1, # 'Y'
|
98 |
-
56: 0, # 'Z'
|
99 |
-
1: 2, # 'a'
|
100 |
-
21: 0, # 'b'
|
101 |
-
28: 2, # 'c'
|
102 |
-
12: 0, # 'd'
|
103 |
-
2: 3, # 'e'
|
104 |
-
18: 0, # 'f'
|
105 |
-
27: 0, # 'g'
|
106 |
-
25: 0, # 'h'
|
107 |
-
3: 0, # 'i'
|
108 |
-
24: 0, # 'j'
|
109 |
-
10: 0, # 'k'
|
110 |
-
5: 0, # 'l'
|
111 |
-
13: 1, # 'm'
|
112 |
-
4: 1, # 'n'
|
113 |
-
15: 0, # 'o'
|
114 |
-
26: 0, # 'p'
|
115 |
-
7: 0, # 'r'
|
116 |
-
8: 0, # 's'
|
117 |
-
9: 0, # 't'
|
118 |
-
14: 2, # 'u'
|
119 |
-
32: 0, # 'v'
|
120 |
-
57: 0, # 'w'
|
121 |
-
58: 0, # 'x'
|
122 |
-
11: 0, # 'y'
|
123 |
-
22: 1, # 'z'
|
124 |
-
63: 0, # '·'
|
125 |
-
54: 0, # 'Ç'
|
126 |
-
50: 1, # 'Ö'
|
127 |
-
55: 0, # 'Ü'
|
128 |
-
59: 0, # 'â'
|
129 |
-
33: 0, # 'ç'
|
130 |
-
61: 0, # 'î'
|
131 |
-
34: 1, # 'ö'
|
132 |
-
17: 0, # 'ü'
|
133 |
-
30: 0, # 'ğ'
|
134 |
-
41: 0, # 'İ'
|
135 |
-
6: 0, # 'ı'
|
136 |
-
40: 1, # 'Ş'
|
137 |
-
19: 1, # 'ş'
|
138 |
-
},
|
139 |
-
47: { # 'C'
|
140 |
-
23: 0, # 'A'
|
141 |
-
37: 0, # 'B'
|
142 |
-
47: 0, # 'C'
|
143 |
-
39: 0, # 'D'
|
144 |
-
29: 0, # 'E'
|
145 |
-
52: 1, # 'F'
|
146 |
-
36: 0, # 'G'
|
147 |
-
45: 0, # 'H'
|
148 |
-
53: 0, # 'I'
|
149 |
-
60: 0, # 'J'
|
150 |
-
16: 0, # 'K'
|
151 |
-
49: 1, # 'L'
|
152 |
-
20: 0, # 'M'
|
153 |
-
46: 1, # 'N'
|
154 |
-
42: 0, # 'O'
|
155 |
-
48: 1, # 'P'
|
156 |
-
44: 1, # 'R'
|
157 |
-
35: 0, # 'S'
|
158 |
-
31: 0, # 'T'
|
159 |
-
51: 0, # 'U'
|
160 |
-
38: 1, # 'V'
|
161 |
-
62: 0, # 'W'
|
162 |
-
43: 1, # 'Y'
|
163 |
-
56: 0, # 'Z'
|
164 |
-
1: 3, # 'a'
|
165 |
-
21: 0, # 'b'
|
166 |
-
28: 2, # 'c'
|
167 |
-
12: 0, # 'd'
|
168 |
-
2: 3, # 'e'
|
169 |
-
18: 0, # 'f'
|
170 |
-
27: 0, # 'g'
|
171 |
-
25: 0, # 'h'
|
172 |
-
3: 0, # 'i'
|
173 |
-
24: 2, # 'j'
|
174 |
-
10: 1, # 'k'
|
175 |
-
5: 2, # 'l'
|
176 |
-
13: 2, # 'm'
|
177 |
-
4: 2, # 'n'
|
178 |
-
15: 1, # 'o'
|
179 |
-
26: 0, # 'p'
|
180 |
-
7: 2, # 'r'
|
181 |
-
8: 0, # 's'
|
182 |
-
9: 0, # 't'
|
183 |
-
14: 3, # 'u'
|
184 |
-
32: 0, # 'v'
|
185 |
-
57: 0, # 'w'
|
186 |
-
58: 0, # 'x'
|
187 |
-
11: 0, # 'y'
|
188 |
-
22: 2, # 'z'
|
189 |
-
63: 0, # '·'
|
190 |
-
54: 0, # 'Ç'
|
191 |
-
50: 1, # 'Ö'
|
192 |
-
55: 0, # 'Ü'
|
193 |
-
59: 0, # 'â'
|
194 |
-
33: 1, # 'ç'
|
195 |
-
61: 0, # 'î'
|
196 |
-
34: 1, # 'ö'
|
197 |
-
17: 0, # 'ü'
|
198 |
-
30: 0, # 'ğ'
|
199 |
-
41: 1, # 'İ'
|
200 |
-
6: 3, # 'ı'
|
201 |
-
40: 0, # 'Ş'
|
202 |
-
19: 0, # 'ş'
|
203 |
-
},
|
204 |
-
39: { # 'D'
|
205 |
-
23: 0, # 'A'
|
206 |
-
37: 0, # 'B'
|
207 |
-
47: 0, # 'C'
|
208 |
-
39: 0, # 'D'
|
209 |
-
29: 0, # 'E'
|
210 |
-
52: 1, # 'F'
|
211 |
-
36: 0, # 'G'
|
212 |
-
45: 0, # 'H'
|
213 |
-
53: 0, # 'I'
|
214 |
-
60: 0, # 'J'
|
215 |
-
16: 1, # 'K'
|
216 |
-
49: 0, # 'L'
|
217 |
-
20: 0, # 'M'
|
218 |
-
46: 0, # 'N'
|
219 |
-
42: 0, # 'O'
|
220 |
-
48: 1, # 'P'
|
221 |
-
44: 0, # 'R'
|
222 |
-
35: 0, # 'S'
|
223 |
-
31: 0, # 'T'
|
224 |
-
51: 0, # 'U'
|
225 |
-
38: 0, # 'V'
|
226 |
-
62: 0, # 'W'
|
227 |
-
43: 0, # 'Y'
|
228 |
-
56: 0, # 'Z'
|
229 |
-
1: 2, # 'a'
|
230 |
-
21: 0, # 'b'
|
231 |
-
28: 2, # 'c'
|
232 |
-
12: 0, # 'd'
|
233 |
-
2: 2, # 'e'
|
234 |
-
18: 0, # 'f'
|
235 |
-
27: 0, # 'g'
|
236 |
-
25: 0, # 'h'
|
237 |
-
3: 0, # 'i'
|
238 |
-
24: 0, # 'j'
|
239 |
-
10: 0, # 'k'
|
240 |
-
5: 1, # 'l'
|
241 |
-
13: 3, # 'm'
|
242 |
-
4: 0, # 'n'
|
243 |
-
15: 1, # 'o'
|
244 |
-
26: 0, # 'p'
|
245 |
-
7: 0, # 'r'
|
246 |
-
8: 0, # 's'
|
247 |
-
9: 0, # 't'
|
248 |
-
14: 1, # 'u'
|
249 |
-
32: 0, # 'v'
|
250 |
-
57: 0, # 'w'
|
251 |
-
58: 0, # 'x'
|
252 |
-
11: 0, # 'y'
|
253 |
-
22: 1, # 'z'
|
254 |
-
63: 0, # '·'
|
255 |
-
54: 1, # 'Ç'
|
256 |
-
50: 0, # 'Ö'
|
257 |
-
55: 0, # 'Ü'
|
258 |
-
59: 0, # 'â'
|
259 |
-
33: 1, # 'ç'
|
260 |
-
61: 0, # 'î'
|
261 |
-
34: 0, # 'ö'
|
262 |
-
17: 0, # 'ü'
|
263 |
-
30: 1, # 'ğ'
|
264 |
-
41: 0, # 'İ'
|
265 |
-
6: 1, # 'ı'
|
266 |
-
40: 1, # 'Ş'
|
267 |
-
19: 0, # 'ş'
|
268 |
-
},
|
269 |
-
29: { # 'E'
|
270 |
-
23: 0, # 'A'
|
271 |
-
37: 0, # 'B'
|
272 |
-
47: 0, # 'C'
|
273 |
-
39: 0, # 'D'
|
274 |
-
29: 1, # 'E'
|
275 |
-
52: 0, # 'F'
|
276 |
-
36: 0, # 'G'
|
277 |
-
45: 0, # 'H'
|
278 |
-
53: 0, # 'I'
|
279 |
-
60: 0, # 'J'
|
280 |
-
16: 3, # 'K'
|
281 |
-
49: 0, # 'L'
|
282 |
-
20: 1, # 'M'
|
283 |
-
46: 0, # 'N'
|
284 |
-
42: 0, # 'O'
|
285 |
-
48: 0, # 'P'
|
286 |
-
44: 0, # 'R'
|
287 |
-
35: 0, # 'S'
|
288 |
-
31: 0, # 'T'
|
289 |
-
51: 0, # 'U'
|
290 |
-
38: 0, # 'V'
|
291 |
-
62: 0, # 'W'
|
292 |
-
43: 0, # 'Y'
|
293 |
-
56: 0, # 'Z'
|
294 |
-
1: 3, # 'a'
|
295 |
-
21: 0, # 'b'
|
296 |
-
28: 0, # 'c'
|
297 |
-
12: 2, # 'd'
|
298 |
-
2: 3, # 'e'
|
299 |
-
18: 0, # 'f'
|
300 |
-
27: 1, # 'g'
|
301 |
-
25: 0, # 'h'
|
302 |
-
3: 1, # 'i'
|
303 |
-
24: 1, # 'j'
|
304 |
-
10: 0, # 'k'
|
305 |
-
5: 3, # 'l'
|
306 |
-
13: 3, # 'm'
|
307 |
-
4: 3, # 'n'
|
308 |
-
15: 0, # 'o'
|
309 |
-
26: 0, # 'p'
|
310 |
-
7: 0, # 'r'
|
311 |
-
8: 1, # 's'
|
312 |
-
9: 1, # 't'
|
313 |
-
14: 1, # 'u'
|
314 |
-
32: 1, # 'v'
|
315 |
-
57: 0, # 'w'
|
316 |
-
58: 0, # 'x'
|
317 |
-
11: 2, # 'y'
|
318 |
-
22: 0, # 'z'
|
319 |
-
63: 0, # '·'
|
320 |
-
54: 0, # 'Ç'
|
321 |
-
50: 0, # 'Ö'
|
322 |
-
55: 0, # 'Ü'
|
323 |
-
59: 0, # 'â'
|
324 |
-
33: 0, # 'ç'
|
325 |
-
61: 0, # 'î'
|
326 |
-
34: 0, # 'ö'
|
327 |
-
17: 0, # 'ü'
|
328 |
-
30: 0, # 'ğ'
|
329 |
-
41: 0, # 'İ'
|
330 |
-
6: 3, # 'ı'
|
331 |
-
40: 0, # 'Ş'
|
332 |
-
19: 0, # 'ş'
|
333 |
-
},
|
334 |
-
52: { # 'F'
|
335 |
-
23: 0, # 'A'
|
336 |
-
37: 1, # 'B'
|
337 |
-
47: 1, # 'C'
|
338 |
-
39: 1, # 'D'
|
339 |
-
29: 1, # 'E'
|
340 |
-
52: 2, # 'F'
|
341 |
-
36: 0, # 'G'
|
342 |
-
45: 2, # 'H'
|
343 |
-
53: 1, # 'I'
|
344 |
-
60: 0, # 'J'
|
345 |
-
16: 0, # 'K'
|
346 |
-
49: 0, # 'L'
|
347 |
-
20: 1, # 'M'
|
348 |
-
46: 1, # 'N'
|
349 |
-
42: 1, # 'O'
|
350 |
-
48: 2, # 'P'
|
351 |
-
44: 1, # 'R'
|
352 |
-
35: 1, # 'S'
|
353 |
-
31: 1, # 'T'
|
354 |
-
51: 1, # 'U'
|
355 |
-
38: 1, # 'V'
|
356 |
-
62: 0, # 'W'
|
357 |
-
43: 2, # 'Y'
|
358 |
-
56: 0, # 'Z'
|
359 |
-
1: 0, # 'a'
|
360 |
-
21: 1, # 'b'
|
361 |
-
28: 1, # 'c'
|
362 |
-
12: 1, # 'd'
|
363 |
-
2: 0, # 'e'
|
364 |
-
18: 1, # 'f'
|
365 |
-
27: 0, # 'g'
|
366 |
-
25: 0, # 'h'
|
367 |
-
3: 2, # 'i'
|
368 |
-
24: 1, # 'j'
|
369 |
-
10: 0, # 'k'
|
370 |
-
5: 0, # 'l'
|
371 |
-
13: 1, # 'm'
|
372 |
-
4: 2, # 'n'
|
373 |
-
15: 1, # 'o'
|
374 |
-
26: 0, # 'p'
|
375 |
-
7: 2, # 'r'
|
376 |
-
8: 1, # 's'
|
377 |
-
9: 1, # 't'
|
378 |
-
14: 1, # 'u'
|
379 |
-
32: 0, # 'v'
|
380 |
-
57: 0, # 'w'
|
381 |
-
58: 0, # 'x'
|
382 |
-
11: 1, # 'y'
|
383 |
-
22: 1, # 'z'
|
384 |
-
63: 0, # '·'
|
385 |
-
54: 0, # 'Ç'
|
386 |
-
50: 1, # 'Ö'
|
387 |
-
55: 2, # 'Ü'
|
388 |
-
59: 0, # 'â'
|
389 |
-
33: 0, # 'ç'
|
390 |
-
61: 0, # 'î'
|
391 |
-
34: 2, # 'ö'
|
392 |
-
17: 0, # 'ü'
|
393 |
-
30: 1, # 'ğ'
|
394 |
-
41: 1, # 'İ'
|
395 |
-
6: 2, # 'ı'
|
396 |
-
40: 0, # 'Ş'
|
397 |
-
19: 2, # 'ş'
|
398 |
-
},
|
399 |
-
36: { # 'G'
|
400 |
-
23: 1, # 'A'
|
401 |
-
37: 0, # 'B'
|
402 |
-
47: 1, # 'C'
|
403 |
-
39: 0, # 'D'
|
404 |
-
29: 0, # 'E'
|
405 |
-
52: 1, # 'F'
|
406 |
-
36: 2, # 'G'
|
407 |
-
45: 0, # 'H'
|
408 |
-
53: 0, # 'I'
|
409 |
-
60: 0, # 'J'
|
410 |
-
16: 2, # 'K'
|
411 |
-
49: 0, # 'L'
|
412 |
-
20: 0, # 'M'
|
413 |
-
46: 2, # 'N'
|
414 |
-
42: 1, # 'O'
|
415 |
-
48: 1, # 'P'
|
416 |
-
44: 1, # 'R'
|
417 |
-
35: 1, # 'S'
|
418 |
-
31: 0, # 'T'
|
419 |
-
51: 1, # 'U'
|
420 |
-
38: 2, # 'V'
|
421 |
-
62: 0, # 'W'
|
422 |
-
43: 0, # 'Y'
|
423 |
-
56: 0, # 'Z'
|
424 |
-
1: 3, # 'a'
|
425 |
-
21: 0, # 'b'
|
426 |
-
28: 1, # 'c'
|
427 |
-
12: 0, # 'd'
|
428 |
-
2: 3, # 'e'
|
429 |
-
18: 0, # 'f'
|
430 |
-
27: 0, # 'g'
|
431 |
-
25: 0, # 'h'
|
432 |
-
3: 0, # 'i'
|
433 |
-
24: 1, # 'j'
|
434 |
-
10: 1, # 'k'
|
435 |
-
5: 0, # 'l'
|
436 |
-
13: 3, # 'm'
|
437 |
-
4: 2, # 'n'
|
438 |
-
15: 0, # 'o'
|
439 |
-
26: 1, # 'p'
|
440 |
-
7: 0, # 'r'
|
441 |
-
8: 1, # 's'
|
442 |
-
9: 1, # 't'
|
443 |
-
14: 3, # 'u'
|
444 |
-
32: 0, # 'v'
|
445 |
-
57: 0, # 'w'
|
446 |
-
58: 1, # 'x'
|
447 |
-
11: 0, # 'y'
|
448 |
-
22: 2, # 'z'
|
449 |
-
63: 0, # '·'
|
450 |
-
54: 1, # 'Ç'
|
451 |
-
50: 2, # 'Ö'
|
452 |
-
55: 0, # 'Ü'
|
453 |
-
59: 1, # 'â'
|
454 |
-
33: 2, # 'ç'
|
455 |
-
61: 0, # 'î'
|
456 |
-
34: 0, # 'ö'
|
457 |
-
17: 0, # 'ü'
|
458 |
-
30: 1, # 'ğ'
|
459 |
-
41: 1, # 'İ'
|
460 |
-
6: 2, # 'ı'
|
461 |
-
40: 2, # 'Ş'
|
462 |
-
19: 1, # 'ş'
|
463 |
-
},
|
464 |
-
45: { # 'H'
|
465 |
-
23: 0, # 'A'
|
466 |
-
37: 1, # 'B'
|
467 |
-
47: 0, # 'C'
|
468 |
-
39: 0, # 'D'
|
469 |
-
29: 0, # 'E'
|
470 |
-
52: 2, # 'F'
|
471 |
-
36: 2, # 'G'
|
472 |
-
45: 1, # 'H'
|
473 |
-
53: 1, # 'I'
|
474 |
-
60: 0, # 'J'
|
475 |
-
16: 2, # 'K'
|
476 |
-
49: 1, # 'L'
|
477 |
-
20: 0, # 'M'
|
478 |
-
46: 1, # 'N'
|
479 |
-
42: 1, # 'O'
|
480 |
-
48: 1, # 'P'
|
481 |
-
44: 0, # 'R'
|
482 |
-
35: 2, # 'S'
|
483 |
-
31: 0, # 'T'
|
484 |
-
51: 1, # 'U'
|
485 |
-
38: 2, # 'V'
|
486 |
-
62: 0, # 'W'
|
487 |
-
43: 0, # 'Y'
|
488 |
-
56: 0, # 'Z'
|
489 |
-
1: 3, # 'a'
|
490 |
-
21: 0, # 'b'
|
491 |
-
28: 2, # 'c'
|
492 |
-
12: 0, # 'd'
|
493 |
-
2: 3, # 'e'
|
494 |
-
18: 0, # 'f'
|
495 |
-
27: 0, # 'g'
|
496 |
-
25: 0, # 'h'
|
497 |
-
3: 2, # 'i'
|
498 |
-
24: 0, # 'j'
|
499 |
-
10: 1, # 'k'
|
500 |
-
5: 0, # 'l'
|
501 |
-
13: 2, # 'm'
|
502 |
-
4: 0, # 'n'
|
503 |
-
15: 1, # 'o'
|
504 |
-
26: 1, # 'p'
|
505 |
-
7: 1, # 'r'
|
506 |
-
8: 0, # 's'
|
507 |
-
9: 0, # 't'
|
508 |
-
14: 3, # 'u'
|
509 |
-
32: 0, # 'v'
|
510 |
-
57: 0, # 'w'
|
511 |
-
58: 0, # 'x'
|
512 |
-
11: 0, # 'y'
|
513 |
-
22: 2, # 'z'
|
514 |
-
63: 0, # '·'
|
515 |
-
54: 1, # 'Ç'
|
516 |
-
50: 1, # 'Ö'
|
517 |
-
55: 0, # 'Ü'
|
518 |
-
59: 0, # 'â'
|
519 |
-
33: 1, # 'ç'
|
520 |
-
61: 0, # 'î'
|
521 |
-
34: 1, # 'ö'
|
522 |
-
17: 0, # 'ü'
|
523 |
-
30: 2, # 'ğ'
|
524 |
-
41: 1, # 'İ'
|
525 |
-
6: 0, # 'ı'
|
526 |
-
40: 2, # 'Ş'
|
527 |
-
19: 1, # 'ş'
|
528 |
-
},
|
529 |
-
53: { # 'I'
|
530 |
-
23: 0, # 'A'
|
531 |
-
37: 0, # 'B'
|
532 |
-
47: 0, # 'C'
|
533 |
-
39: 0, # 'D'
|
534 |
-
29: 0, # 'E'
|
535 |
-
52: 1, # 'F'
|
536 |
-
36: 0, # 'G'
|
537 |
-
45: 0, # 'H'
|
538 |
-
53: 0, # 'I'
|
539 |
-
60: 0, # 'J'
|
540 |
-
16: 2, # 'K'
|
541 |
-
49: 0, # 'L'
|
542 |
-
20: 0, # 'M'
|
543 |
-
46: 0, # 'N'
|
544 |
-
42: 0, # 'O'
|
545 |
-
48: 1, # 'P'
|
546 |
-
44: 0, # 'R'
|
547 |
-
35: 0, # 'S'
|
548 |
-
31: 0, # 'T'
|
549 |
-
51: 0, # 'U'
|
550 |
-
38: 0, # 'V'
|
551 |
-
62: 0, # 'W'
|
552 |
-
43: 0, # 'Y'
|
553 |
-
56: 0, # 'Z'
|
554 |
-
1: 2, # 'a'
|
555 |
-
21: 0, # 'b'
|
556 |
-
28: 2, # 'c'
|
557 |
-
12: 0, # 'd'
|
558 |
-
2: 2, # 'e'
|
559 |
-
18: 0, # 'f'
|
560 |
-
27: 0, # 'g'
|
561 |
-
25: 0, # 'h'
|
562 |
-
3: 0, # 'i'
|
563 |
-
24: 0, # 'j'
|
564 |
-
10: 0, # 'k'
|
565 |
-
5: 2, # 'l'
|
566 |
-
13: 2, # 'm'
|
567 |
-
4: 0, # 'n'
|
568 |
-
15: 0, # 'o'
|
569 |
-
26: 0, # 'p'
|
570 |
-
7: 0, # 'r'
|
571 |
-
8: 0, # 's'
|
572 |
-
9: 0, # 't'
|
573 |
-
14: 2, # 'u'
|
574 |
-
32: 0, # 'v'
|
575 |
-
57: 0, # 'w'
|
576 |
-
58: 0, # 'x'
|
577 |
-
11: 0, # 'y'
|
578 |
-
22: 2, # 'z'
|
579 |
-
63: 0, # '·'
|
580 |
-
54: 1, # 'Ç'
|
581 |
-
50: 0, # 'Ö'
|
582 |
-
55: 0, # 'Ü'
|
583 |
-
59: 0, # 'â'
|
584 |
-
33: 2, # 'ç'
|
585 |
-
61: 0, # 'î'
|
586 |
-
34: 1, # 'ö'
|
587 |
-
17: 0, # 'ü'
|
588 |
-
30: 0, # 'ğ'
|
589 |
-
41: 0, # 'İ'
|
590 |
-
6: 0, # 'ı'
|
591 |
-
40: 1, # 'Ş'
|
592 |
-
19: 1, # 'ş'
|
593 |
-
},
|
594 |
-
60: { # 'J'
|
595 |
-
23: 0, # 'A'
|
596 |
-
37: 0, # 'B'
|
597 |
-
47: 0, # 'C'
|
598 |
-
39: 0, # 'D'
|
599 |
-
29: 0, # 'E'
|
600 |
-
52: 0, # 'F'
|
601 |
-
36: 0, # 'G'
|
602 |
-
45: 0, # 'H'
|
603 |
-
53: 0, # 'I'
|
604 |
-
60: 0, # 'J'
|
605 |
-
16: 0, # 'K'
|
606 |
-
49: 0, # 'L'
|
607 |
-
20: 1, # 'M'
|
608 |
-
46: 0, # 'N'
|
609 |
-
42: 0, # 'O'
|
610 |
-
48: 0, # 'P'
|
611 |
-
44: 0, # 'R'
|
612 |
-
35: 0, # 'S'
|
613 |
-
31: 0, # 'T'
|
614 |
-
51: 0, # 'U'
|
615 |
-
38: 0, # 'V'
|
616 |
-
62: 0, # 'W'
|
617 |
-
43: 0, # 'Y'
|
618 |
-
56: 0, # 'Z'
|
619 |
-
1: 0, # 'a'
|
620 |
-
21: 1, # 'b'
|
621 |
-
28: 0, # 'c'
|
622 |
-
12: 1, # 'd'
|
623 |
-
2: 0, # 'e'
|
624 |
-
18: 0, # 'f'
|
625 |
-
27: 0, # 'g'
|
626 |
-
25: 0, # 'h'
|
627 |
-
3: 1, # 'i'
|
628 |
-
24: 0, # 'j'
|
629 |
-
10: 0, # 'k'
|
630 |
-
5: 0, # 'l'
|
631 |
-
13: 0, # 'm'
|
632 |
-
4: 1, # 'n'
|
633 |
-
15: 0, # 'o'
|
634 |
-
26: 0, # 'p'
|
635 |
-
7: 0, # 'r'
|
636 |
-
8: 1, # 's'
|
637 |
-
9: 0, # 't'
|
638 |
-
14: 0, # 'u'
|
639 |
-
32: 0, # 'v'
|
640 |
-
57: 0, # 'w'
|
641 |
-
58: 0, # 'x'
|
642 |
-
11: 0, # 'y'
|
643 |
-
22: 0, # 'z'
|
644 |
-
63: 0, # '·'
|
645 |
-
54: 0, # 'Ç'
|
646 |
-
50: 0, # 'Ö'
|
647 |
-
55: 0, # 'Ü'
|
648 |
-
59: 0, # 'â'
|
649 |
-
33: 0, # 'ç'
|
650 |
-
61: 0, # 'î'
|
651 |
-
34: 0, # 'ö'
|
652 |
-
17: 0, # 'ü'
|
653 |
-
30: 0, # 'ğ'
|
654 |
-
41: 0, # 'İ'
|
655 |
-
6: 0, # 'ı'
|
656 |
-
40: 0, # 'Ş'
|
657 |
-
19: 0, # 'ş'
|
658 |
-
},
|
659 |
-
16: { # 'K'
|
660 |
-
23: 0, # 'A'
|
661 |
-
37: 0, # 'B'
|
662 |
-
47: 0, # 'C'
|
663 |
-
39: 0, # 'D'
|
664 |
-
29: 3, # 'E'
|
665 |
-
52: 0, # 'F'
|
666 |
-
36: 0, # 'G'
|
667 |
-
45: 0, # 'H'
|
668 |
-
53: 0, # 'I'
|
669 |
-
60: 0, # 'J'
|
670 |
-
16: 0, # 'K'
|
671 |
-
49: 0, # 'L'
|
672 |
-
20: 2, # 'M'
|
673 |
-
46: 0, # 'N'
|
674 |
-
42: 0, # 'O'
|
675 |
-
48: 0, # 'P'
|
676 |
-
44: 0, # 'R'
|
677 |
-
35: 0, # 'S'
|
678 |
-
31: 2, # 'T'
|
679 |
-
51: 0, # 'U'
|
680 |
-
38: 0, # 'V'
|
681 |
-
62: 0, # 'W'
|
682 |
-
43: 0, # 'Y'
|
683 |
-
56: 0, # 'Z'
|
684 |
-
1: 2, # 'a'
|
685 |
-
21: 3, # 'b'
|
686 |
-
28: 0, # 'c'
|
687 |
-
12: 3, # 'd'
|
688 |
-
2: 1, # 'e'
|
689 |
-
18: 3, # 'f'
|
690 |
-
27: 3, # 'g'
|
691 |
-
25: 3, # 'h'
|
692 |
-
3: 3, # 'i'
|
693 |
-
24: 2, # 'j'
|
694 |
-
10: 3, # 'k'
|
695 |
-
5: 0, # 'l'
|
696 |
-
13: 0, # 'm'
|
697 |
-
4: 3, # 'n'
|
698 |
-
15: 0, # 'o'
|
699 |
-
26: 1, # 'p'
|
700 |
-
7: 3, # 'r'
|
701 |
-
8: 3, # 's'
|
702 |
-
9: 3, # 't'
|
703 |
-
14: 0, # 'u'
|
704 |
-
32: 3, # 'v'
|
705 |
-
57: 0, # 'w'
|
706 |
-
58: 0, # 'x'
|
707 |
-
11: 2, # 'y'
|
708 |
-
22: 1, # 'z'
|
709 |
-
63: 0, # '·'
|
710 |
-
54: 0, # 'Ç'
|
711 |
-
50: 0, # 'Ö'
|
712 |
-
55: 0, # 'Ü'
|
713 |
-
59: 0, # 'â'
|
714 |
-
33: 0, # 'ç'
|
715 |
-
61: 0, # 'î'
|
716 |
-
34: 0, # 'ö'
|
717 |
-
17: 2, # 'ü'
|
718 |
-
30: 0, # 'ğ'
|
719 |
-
41: 1, # 'İ'
|
720 |
-
6: 3, # 'ı'
|
721 |
-
40: 0, # 'Ş'
|
722 |
-
19: 0, # 'ş'
|
723 |
-
},
|
724 |
-
49: { # 'L'
|
725 |
-
23: 0, # 'A'
|
726 |
-
37: 0, # 'B'
|
727 |
-
47: 0, # 'C'
|
728 |
-
39: 0, # 'D'
|
729 |
-
29: 2, # 'E'
|
730 |
-
52: 0, # 'F'
|
731 |
-
36: 1, # 'G'
|
732 |
-
45: 1, # 'H'
|
733 |
-
53: 0, # 'I'
|
734 |
-
60: 0, # 'J'
|
735 |
-
16: 0, # 'K'
|
736 |
-
49: 0, # 'L'
|
737 |
-
20: 1, # 'M'
|
738 |
-
46: 0, # 'N'
|
739 |
-
42: 2, # 'O'
|
740 |
-
48: 0, # 'P'
|
741 |
-
44: 0, # 'R'
|
742 |
-
35: 0, # 'S'
|
743 |
-
31: 0, # 'T'
|
744 |
-
51: 0, # 'U'
|
745 |
-
38: 0, # 'V'
|
746 |
-
62: 0, # 'W'
|
747 |
-
43: 1, # 'Y'
|
748 |
-
56: 0, # 'Z'
|
749 |
-
1: 0, # 'a'
|
750 |
-
21: 3, # 'b'
|
751 |
-
28: 0, # 'c'
|
752 |
-
12: 2, # 'd'
|
753 |
-
2: 0, # 'e'
|
754 |
-
18: 0, # 'f'
|
755 |
-
27: 0, # 'g'
|
756 |
-
25: 0, # 'h'
|
757 |
-
3: 2, # 'i'
|
758 |
-
24: 0, # 'j'
|
759 |
-
10: 1, # 'k'
|
760 |
-
5: 0, # 'l'
|
761 |
-
13: 0, # 'm'
|
762 |
-
4: 2, # 'n'
|
763 |
-
15: 1, # 'o'
|
764 |
-
26: 1, # 'p'
|
765 |
-
7: 1, # 'r'
|
766 |
-
8: 1, # 's'
|
767 |
-
9: 1, # 't'
|
768 |
-
14: 0, # 'u'
|
769 |
-
32: 0, # 'v'
|
770 |
-
57: 0, # 'w'
|
771 |
-
58: 0, # 'x'
|
772 |
-
11: 2, # 'y'
|
773 |
-
22: 0, # 'z'
|
774 |
-
63: 0, # '·'
|
775 |
-
54: 0, # 'Ç'
|
776 |
-
50: 0, # 'Ö'
|
777 |
-
55: 2, # 'Ü'
|
778 |
-
59: 0, # 'â'
|
779 |
-
33: 0, # 'ç'
|
780 |
-
61: 0, # 'î'
|
781 |
-
34: 1, # 'ö'
|
782 |
-
17: 1, # 'ü'
|
783 |
-
30: 1, # 'ğ'
|
784 |
-
41: 0, # 'İ'
|
785 |
-
6: 2, # 'ı'
|
786 |
-
40: 0, # 'Ş'
|
787 |
-
19: 0, # 'ş'
|
788 |
-
},
|
789 |
-
20: { # 'M'
|
790 |
-
23: 1, # 'A'
|
791 |
-
37: 0, # 'B'
|
792 |
-
47: 0, # 'C'
|
793 |
-
39: 0, # 'D'
|
794 |
-
29: 0, # 'E'
|
795 |
-
52: 0, # 'F'
|
796 |
-
36: 0, # 'G'
|
797 |
-
45: 0, # 'H'
|
798 |
-
53: 0, # 'I'
|
799 |
-
60: 1, # 'J'
|
800 |
-
16: 3, # 'K'
|
801 |
-
49: 0, # 'L'
|
802 |
-
20: 2, # 'M'
|
803 |
-
46: 0, # 'N'
|
804 |
-
42: 0, # 'O'
|
805 |
-
48: 0, # 'P'
|
806 |
-
44: 0, # 'R'
|
807 |
-
35: 0, # 'S'
|
808 |
-
31: 1, # 'T'
|
809 |
-
51: 0, # 'U'
|
810 |
-
38: 0, # 'V'
|
811 |
-
62: 0, # 'W'
|
812 |
-
43: 0, # 'Y'
|
813 |
-
56: 0, # 'Z'
|
814 |
-
1: 3, # 'a'
|
815 |
-
21: 2, # 'b'
|
816 |
-
28: 0, # 'c'
|
817 |
-
12: 3, # 'd'
|
818 |
-
2: 3, # 'e'
|
819 |
-
18: 0, # 'f'
|
820 |
-
27: 1, # 'g'
|
821 |
-
25: 1, # 'h'
|
822 |
-
3: 2, # 'i'
|
823 |
-
24: 2, # 'j'
|
824 |
-
10: 2, # 'k'
|
825 |
-
5: 2, # 'l'
|
826 |
-
13: 3, # 'm'
|
827 |
-
4: 3, # 'n'
|
828 |
-
15: 0, # 'o'
|
829 |
-
26: 1, # 'p'
|
830 |
-
7: 3, # 'r'
|
831 |
-
8: 0, # 's'
|
832 |
-
9: 2, # 't'
|
833 |
-
14: 3, # 'u'
|
834 |
-
32: 0, # 'v'
|
835 |
-
57: 0, # 'w'
|
836 |
-
58: 0, # 'x'
|
837 |
-
11: 2, # 'y'
|
838 |
-
22: 0, # 'z'
|
839 |
-
63: 0, # '·'
|
840 |
-
54: 0, # 'Ç'
|
841 |
-
50: 0, # 'Ö'
|
842 |
-
55: 0, # 'Ü'
|
843 |
-
59: 0, # 'â'
|
844 |
-
33: 3, # 'ç'
|
845 |
-
61: 0, # 'î'
|
846 |
-
34: 0, # 'ö'
|
847 |
-
17: 0, # 'ü'
|
848 |
-
30: 0, # 'ğ'
|
849 |
-
41: 0, # 'İ'
|
850 |
-
6: 3, # 'ı'
|
851 |
-
40: 0, # 'Ş'
|
852 |
-
19: 0, # 'ş'
|
853 |
-
},
|
854 |
-
46: { # 'N'
|
855 |
-
23: 0, # 'A'
|
856 |
-
37: 1, # 'B'
|
857 |
-
47: 0, # 'C'
|
858 |
-
39: 0, # 'D'
|
859 |
-
29: 0, # 'E'
|
860 |
-
52: 1, # 'F'
|
861 |
-
36: 1, # 'G'
|
862 |
-
45: 1, # 'H'
|
863 |
-
53: 0, # 'I'
|
864 |
-
60: 0, # 'J'
|
865 |
-
16: 2, # 'K'
|
866 |
-
49: 0, # 'L'
|
867 |
-
20: 0, # 'M'
|
868 |
-
46: 1, # 'N'
|
869 |
-
- [lines 870-3576 of the deleted file, all marked as removed: continuation of a character-pair likelihood table, apparently a Turkish single-byte language model. The span holds the per-character sub-tables for 'O', 'P', 'R', 'S', 'T', 'U', 'V', 'W', 'Y', 'Z', the lowercase Turkish letters 'a'-'z' (no 'q'), '·', 'Ç', 'Ö', 'Ü', 'â', and the start of 'ç'; each sub-table maps a character index to a rating from 0 to 3, with the character noted in an inline comment (e.g. "42: 0,  # 'O'"). See the raw diff for the full numeric values.]
34: 0, # 'ö'
|
3577 |
-
17: 1, # 'ü'
|
3578 |
-
30: 0, # 'ğ'
|
3579 |
-
41: 0, # 'İ'
|
3580 |
-
6: 3, # 'ı'
|
3581 |
-
40: 0, # 'Ş'
|
3582 |
-
19: 0, # 'ş'
|
3583 |
-
},
|
3584 |
-
61: { # 'î'
|
3585 |
-
23: 0, # 'A'
|
3586 |
-
37: 0, # 'B'
|
3587 |
-
47: 0, # 'C'
|
3588 |
-
39: 0, # 'D'
|
3589 |
-
29: 0, # 'E'
|
3590 |
-
52: 0, # 'F'
|
3591 |
-
36: 0, # 'G'
|
3592 |
-
45: 0, # 'H'
|
3593 |
-
53: 0, # 'I'
|
3594 |
-
60: 0, # 'J'
|
3595 |
-
16: 0, # 'K'
|
3596 |
-
49: 0, # 'L'
|
3597 |
-
20: 0, # 'M'
|
3598 |
-
46: 0, # 'N'
|
3599 |
-
42: 0, # 'O'
|
3600 |
-
48: 0, # 'P'
|
3601 |
-
44: 0, # 'R'
|
3602 |
-
35: 0, # 'S'
|
3603 |
-
31: 0, # 'T'
|
3604 |
-
51: 0, # 'U'
|
3605 |
-
38: 0, # 'V'
|
3606 |
-
62: 0, # 'W'
|
3607 |
-
43: 0, # 'Y'
|
3608 |
-
56: 1, # 'Z'
|
3609 |
-
1: 2, # 'a'
|
3610 |
-
21: 0, # 'b'
|
3611 |
-
28: 0, # 'c'
|
3612 |
-
12: 0, # 'd'
|
3613 |
-
2: 2, # 'e'
|
3614 |
-
18: 0, # 'f'
|
3615 |
-
27: 0, # 'g'
|
3616 |
-
25: 0, # 'h'
|
3617 |
-
3: 0, # 'i'
|
3618 |
-
24: 1, # 'j'
|
3619 |
-
10: 0, # 'k'
|
3620 |
-
5: 0, # 'l'
|
3621 |
-
13: 1, # 'm'
|
3622 |
-
4: 1, # 'n'
|
3623 |
-
15: 0, # 'o'
|
3624 |
-
26: 0, # 'p'
|
3625 |
-
7: 0, # 'r'
|
3626 |
-
8: 0, # 's'
|
3627 |
-
9: 0, # 't'
|
3628 |
-
14: 1, # 'u'
|
3629 |
-
32: 0, # 'v'
|
3630 |
-
57: 0, # 'w'
|
3631 |
-
58: 0, # 'x'
|
3632 |
-
11: 0, # 'y'
|
3633 |
-
22: 1, # 'z'
|
3634 |
-
63: 0, # '·'
|
3635 |
-
54: 0, # 'Ç'
|
3636 |
-
50: 0, # 'Ö'
|
3637 |
-
55: 0, # 'Ü'
|
3638 |
-
59: 0, # 'â'
|
3639 |
-
33: 0, # 'ç'
|
3640 |
-
61: 1, # 'î'
|
3641 |
-
34: 0, # 'ö'
|
3642 |
-
17: 0, # 'ü'
|
3643 |
-
30: 0, # 'ğ'
|
3644 |
-
41: 0, # 'İ'
|
3645 |
-
6: 1, # 'ı'
|
3646 |
-
40: 0, # 'Ş'
|
3647 |
-
19: 0, # 'ş'
|
3648 |
-
},
|
3649 |
-
34: { # 'ö'
|
3650 |
-
23: 0, # 'A'
|
3651 |
-
37: 1, # 'B'
|
3652 |
-
47: 1, # 'C'
|
3653 |
-
39: 0, # 'D'
|
3654 |
-
29: 0, # 'E'
|
3655 |
-
52: 2, # 'F'
|
3656 |
-
36: 1, # 'G'
|
3657 |
-
45: 1, # 'H'
|
3658 |
-
53: 0, # 'I'
|
3659 |
-
60: 0, # 'J'
|
3660 |
-
16: 3, # 'K'
|
3661 |
-
49: 1, # 'L'
|
3662 |
-
20: 0, # 'M'
|
3663 |
-
46: 1, # 'N'
|
3664 |
-
42: 1, # 'O'
|
3665 |
-
48: 2, # 'P'
|
3666 |
-
44: 1, # 'R'
|
3667 |
-
35: 1, # 'S'
|
3668 |
-
31: 1, # 'T'
|
3669 |
-
51: 1, # 'U'
|
3670 |
-
38: 1, # 'V'
|
3671 |
-
62: 0, # 'W'
|
3672 |
-
43: 0, # 'Y'
|
3673 |
-
56: 1, # 'Z'
|
3674 |
-
1: 3, # 'a'
|
3675 |
-
21: 1, # 'b'
|
3676 |
-
28: 2, # 'c'
|
3677 |
-
12: 1, # 'd'
|
3678 |
-
2: 3, # 'e'
|
3679 |
-
18: 0, # 'f'
|
3680 |
-
27: 2, # 'g'
|
3681 |
-
25: 2, # 'h'
|
3682 |
-
3: 1, # 'i'
|
3683 |
-
24: 2, # 'j'
|
3684 |
-
10: 1, # 'k'
|
3685 |
-
5: 2, # 'l'
|
3686 |
-
13: 3, # 'm'
|
3687 |
-
4: 2, # 'n'
|
3688 |
-
15: 2, # 'o'
|
3689 |
-
26: 0, # 'p'
|
3690 |
-
7: 0, # 'r'
|
3691 |
-
8: 3, # 's'
|
3692 |
-
9: 1, # 't'
|
3693 |
-
14: 3, # 'u'
|
3694 |
-
32: 0, # 'v'
|
3695 |
-
57: 0, # 'w'
|
3696 |
-
58: 0, # 'x'
|
3697 |
-
11: 1, # 'y'
|
3698 |
-
22: 2, # 'z'
|
3699 |
-
63: 0, # '·'
|
3700 |
-
54: 1, # 'Ç'
|
3701 |
-
50: 2, # 'Ö'
|
3702 |
-
55: 0, # 'Ü'
|
3703 |
-
59: 0, # 'â'
|
3704 |
-
33: 2, # 'ç'
|
3705 |
-
61: 0, # 'î'
|
3706 |
-
34: 2, # 'ö'
|
3707 |
-
17: 0, # 'ü'
|
3708 |
-
30: 2, # 'ğ'
|
3709 |
-
41: 1, # 'İ'
|
3710 |
-
6: 1, # 'ı'
|
3711 |
-
40: 2, # 'Ş'
|
3712 |
-
19: 1, # 'ş'
|
3713 |
-
},
|
3714 |
-
17: { # 'ü'
|
3715 |
-
23: 0, # 'A'
|
3716 |
-
37: 0, # 'B'
|
3717 |
-
47: 1, # 'C'
|
3718 |
-
39: 0, # 'D'
|
3719 |
-
29: 0, # 'E'
|
3720 |
-
52: 0, # 'F'
|
3721 |
-
36: 0, # 'G'
|
3722 |
-
45: 0, # 'H'
|
3723 |
-
53: 0, # 'I'
|
3724 |
-
60: 1, # 'J'
|
3725 |
-
16: 1, # 'K'
|
3726 |
-
49: 0, # 'L'
|
3727 |
-
20: 1, # 'M'
|
3728 |
-
46: 0, # 'N'
|
3729 |
-
42: 0, # 'O'
|
3730 |
-
48: 0, # 'P'
|
3731 |
-
44: 0, # 'R'
|
3732 |
-
35: 0, # 'S'
|
3733 |
-
31: 1, # 'T'
|
3734 |
-
51: 0, # 'U'
|
3735 |
-
38: 0, # 'V'
|
3736 |
-
62: 0, # 'W'
|
3737 |
-
43: 0, # 'Y'
|
3738 |
-
56: 1, # 'Z'
|
3739 |
-
1: 3, # 'a'
|
3740 |
-
21: 0, # 'b'
|
3741 |
-
28: 0, # 'c'
|
3742 |
-
12: 1, # 'd'
|
3743 |
-
2: 3, # 'e'
|
3744 |
-
18: 1, # 'f'
|
3745 |
-
27: 2, # 'g'
|
3746 |
-
25: 0, # 'h'
|
3747 |
-
3: 1, # 'i'
|
3748 |
-
24: 1, # 'j'
|
3749 |
-
10: 2, # 'k'
|
3750 |
-
5: 3, # 'l'
|
3751 |
-
13: 2, # 'm'
|
3752 |
-
4: 3, # 'n'
|
3753 |
-
15: 0, # 'o'
|
3754 |
-
26: 2, # 'p'
|
3755 |
-
7: 2, # 'r'
|
3756 |
-
8: 3, # 's'
|
3757 |
-
9: 2, # 't'
|
3758 |
-
14: 3, # 'u'
|
3759 |
-
32: 1, # 'v'
|
3760 |
-
57: 1, # 'w'
|
3761 |
-
58: 0, # 'x'
|
3762 |
-
11: 0, # 'y'
|
3763 |
-
22: 0, # 'z'
|
3764 |
-
63: 0, # '·'
|
3765 |
-
54: 0, # 'Ç'
|
3766 |
-
50: 0, # 'Ö'
|
3767 |
-
55: 0, # 'Ü'
|
3768 |
-
59: 0, # 'â'
|
3769 |
-
33: 1, # 'ç'
|
3770 |
-
61: 0, # 'î'
|
3771 |
-
34: 0, # 'ö'
|
3772 |
-
17: 2, # 'ü'
|
3773 |
-
30: 0, # 'ğ'
|
3774 |
-
41: 0, # 'İ'
|
3775 |
-
6: 2, # 'ı'
|
3776 |
-
40: 0, # 'Ş'
|
3777 |
-
19: 0, # 'ş'
|
3778 |
-
},
|
3779 |
-
30: { # 'ğ'
|
3780 |
-
23: 0, # 'A'
|
3781 |
-
37: 2, # 'B'
|
3782 |
-
47: 1, # 'C'
|
3783 |
-
39: 0, # 'D'
|
3784 |
-
29: 0, # 'E'
|
3785 |
-
52: 2, # 'F'
|
3786 |
-
36: 1, # 'G'
|
3787 |
-
45: 0, # 'H'
|
3788 |
-
53: 1, # 'I'
|
3789 |
-
60: 0, # 'J'
|
3790 |
-
16: 3, # 'K'
|
3791 |
-
49: 0, # 'L'
|
3792 |
-
20: 1, # 'M'
|
3793 |
-
46: 2, # 'N'
|
3794 |
-
42: 2, # 'O'
|
3795 |
-
48: 1, # 'P'
|
3796 |
-
44: 1, # 'R'
|
3797 |
-
35: 0, # 'S'
|
3798 |
-
31: 1, # 'T'
|
3799 |
-
51: 0, # 'U'
|
3800 |
-
38: 2, # 'V'
|
3801 |
-
62: 0, # 'W'
|
3802 |
-
43: 2, # 'Y'
|
3803 |
-
56: 0, # 'Z'
|
3804 |
-
1: 3, # 'a'
|
3805 |
-
21: 0, # 'b'
|
3806 |
-
28: 2, # 'c'
|
3807 |
-
12: 0, # 'd'
|
3808 |
-
2: 2, # 'e'
|
3809 |
-
18: 0, # 'f'
|
3810 |
-
27: 0, # 'g'
|
3811 |
-
25: 0, # 'h'
|
3812 |
-
3: 0, # 'i'
|
3813 |
-
24: 3, # 'j'
|
3814 |
-
10: 1, # 'k'
|
3815 |
-
5: 2, # 'l'
|
3816 |
-
13: 3, # 'm'
|
3817 |
-
4: 0, # 'n'
|
3818 |
-
15: 1, # 'o'
|
3819 |
-
26: 0, # 'p'
|
3820 |
-
7: 1, # 'r'
|
3821 |
-
8: 0, # 's'
|
3822 |
-
9: 0, # 't'
|
3823 |
-
14: 3, # 'u'
|
3824 |
-
32: 0, # 'v'
|
3825 |
-
57: 0, # 'w'
|
3826 |
-
58: 0, # 'x'
|
3827 |
-
11: 0, # 'y'
|
3828 |
-
22: 2, # 'z'
|
3829 |
-
63: 0, # '·'
|
3830 |
-
54: 2, # 'Ç'
|
3831 |
-
50: 2, # 'Ö'
|
3832 |
-
55: 0, # 'Ü'
|
3833 |
-
59: 0, # 'â'
|
3834 |
-
33: 1, # 'ç'
|
3835 |
-
61: 0, # 'î'
|
3836 |
-
34: 2, # 'ö'
|
3837 |
-
17: 0, # 'ü'
|
3838 |
-
30: 1, # 'ğ'
|
3839 |
-
41: 2, # 'İ'
|
3840 |
-
6: 2, # 'ı'
|
3841 |
-
40: 2, # 'Ş'
|
3842 |
-
19: 1, # 'ş'
|
3843 |
-
},
|
3844 |
-
41: { # 'İ'
|
3845 |
-
23: 0, # 'A'
|
3846 |
-
37: 0, # 'B'
|
3847 |
-
47: 1, # 'C'
|
3848 |
-
39: 1, # 'D'
|
3849 |
-
29: 1, # 'E'
|
3850 |
-
52: 0, # 'F'
|
3851 |
-
36: 2, # 'G'
|
3852 |
-
45: 2, # 'H'
|
3853 |
-
53: 0, # 'I'
|
3854 |
-
60: 0, # 'J'
|
3855 |
-
16: 0, # 'K'
|
3856 |
-
49: 0, # 'L'
|
3857 |
-
20: 2, # 'M'
|
3858 |
-
46: 1, # 'N'
|
3859 |
-
42: 1, # 'O'
|
3860 |
-
48: 2, # 'P'
|
3861 |
-
44: 0, # 'R'
|
3862 |
-
35: 1, # 'S'
|
3863 |
-
31: 1, # 'T'
|
3864 |
-
51: 1, # 'U'
|
3865 |
-
38: 1, # 'V'
|
3866 |
-
62: 0, # 'W'
|
3867 |
-
43: 2, # 'Y'
|
3868 |
-
56: 0, # 'Z'
|
3869 |
-
1: 1, # 'a'
|
3870 |
-
21: 2, # 'b'
|
3871 |
-
28: 1, # 'c'
|
3872 |
-
12: 2, # 'd'
|
3873 |
-
2: 1, # 'e'
|
3874 |
-
18: 0, # 'f'
|
3875 |
-
27: 3, # 'g'
|
3876 |
-
25: 2, # 'h'
|
3877 |
-
3: 2, # 'i'
|
3878 |
-
24: 2, # 'j'
|
3879 |
-
10: 2, # 'k'
|
3880 |
-
5: 0, # 'l'
|
3881 |
-
13: 1, # 'm'
|
3882 |
-
4: 3, # 'n'
|
3883 |
-
15: 1, # 'o'
|
3884 |
-
26: 1, # 'p'
|
3885 |
-
7: 3, # 'r'
|
3886 |
-
8: 3, # 's'
|
3887 |
-
9: 2, # 't'
|
3888 |
-
14: 0, # 'u'
|
3889 |
-
32: 0, # 'v'
|
3890 |
-
57: 1, # 'w'
|
3891 |
-
58: 0, # 'x'
|
3892 |
-
11: 2, # 'y'
|
3893 |
-
22: 0, # 'z'
|
3894 |
-
63: 0, # '·'
|
3895 |
-
54: 0, # 'Ç'
|
3896 |
-
50: 0, # 'Ö'
|
3897 |
-
55: 1, # 'Ü'
|
3898 |
-
59: 1, # 'â'
|
3899 |
-
33: 0, # 'ç'
|
3900 |
-
61: 0, # 'î'
|
3901 |
-
34: 1, # 'ö'
|
3902 |
-
17: 1, # 'ü'
|
3903 |
-
30: 2, # 'ğ'
|
3904 |
-
41: 0, # 'İ'
|
3905 |
-
6: 3, # 'ı'
|
3906 |
-
40: 0, # 'Ş'
|
3907 |
-
19: 1, # 'ş'
|
3908 |
-
},
|
3909 |
-
6: { # 'ı'
|
3910 |
-
23: 2, # 'A'
|
3911 |
-
37: 0, # 'B'
|
3912 |
-
47: 0, # 'C'
|
3913 |
-
39: 0, # 'D'
|
3914 |
-
29: 0, # 'E'
|
3915 |
-
52: 0, # 'F'
|
3916 |
-
36: 1, # 'G'
|
3917 |
-
45: 0, # 'H'
|
3918 |
-
53: 0, # 'I'
|
3919 |
-
60: 2, # 'J'
|
3920 |
-
16: 3, # 'K'
|
3921 |
-
49: 0, # 'L'
|
3922 |
-
20: 3, # 'M'
|
3923 |
-
46: 1, # 'N'
|
3924 |
-
42: 0, # 'O'
|
3925 |
-
48: 0, # 'P'
|
3926 |
-
44: 0, # 'R'
|
3927 |
-
35: 0, # 'S'
|
3928 |
-
31: 2, # 'T'
|
3929 |
-
51: 0, # 'U'
|
3930 |
-
38: 0, # 'V'
|
3931 |
-
62: 0, # 'W'
|
3932 |
-
43: 2, # 'Y'
|
3933 |
-
56: 1, # 'Z'
|
3934 |
-
1: 3, # 'a'
|
3935 |
-
21: 2, # 'b'
|
3936 |
-
28: 1, # 'c'
|
3937 |
-
12: 3, # 'd'
|
3938 |
-
2: 3, # 'e'
|
3939 |
-
18: 3, # 'f'
|
3940 |
-
27: 3, # 'g'
|
3941 |
-
25: 2, # 'h'
|
3942 |
-
3: 3, # 'i'
|
3943 |
-
24: 3, # 'j'
|
3944 |
-
10: 3, # 'k'
|
3945 |
-
5: 3, # 'l'
|
3946 |
-
13: 3, # 'm'
|
3947 |
-
4: 3, # 'n'
|
3948 |
-
15: 0, # 'o'
|
3949 |
-
26: 3, # 'p'
|
3950 |
-
7: 3, # 'r'
|
3951 |
-
8: 3, # 's'
|
3952 |
-
9: 3, # 't'
|
3953 |
-
14: 3, # 'u'
|
3954 |
-
32: 3, # 'v'
|
3955 |
-
57: 1, # 'w'
|
3956 |
-
58: 1, # 'x'
|
3957 |
-
11: 3, # 'y'
|
3958 |
-
22: 0, # 'z'
|
3959 |
-
63: 1, # '·'
|
3960 |
-
54: 0, # 'Ç'
|
3961 |
-
50: 0, # 'Ö'
|
3962 |
-
55: 0, # 'Ü'
|
3963 |
-
59: 0, # 'â'
|
3964 |
-
33: 2, # 'ç'
|
3965 |
-
61: 0, # 'î'
|
3966 |
-
34: 0, # 'ö'
|
3967 |
-
17: 3, # 'ü'
|
3968 |
-
30: 0, # 'ğ'
|
3969 |
-
41: 0, # 'İ'
|
3970 |
-
6: 3, # 'ı'
|
3971 |
-
40: 0, # 'Ş'
|
3972 |
-
19: 0, # 'ş'
|
3973 |
-
},
|
3974 |
-
40: { # 'Ş'
|
3975 |
-
23: 0, # 'A'
|
3976 |
-
37: 0, # 'B'
|
3977 |
-
47: 1, # 'C'
|
3978 |
-
39: 1, # 'D'
|
3979 |
-
29: 1, # 'E'
|
3980 |
-
52: 0, # 'F'
|
3981 |
-
36: 1, # 'G'
|
3982 |
-
45: 2, # 'H'
|
3983 |
-
53: 1, # 'I'
|
3984 |
-
60: 0, # 'J'
|
3985 |
-
16: 0, # 'K'
|
3986 |
-
49: 0, # 'L'
|
3987 |
-
20: 2, # 'M'
|
3988 |
-
46: 1, # 'N'
|
3989 |
-
42: 1, # 'O'
|
3990 |
-
48: 2, # 'P'
|
3991 |
-
44: 2, # 'R'
|
3992 |
-
35: 1, # 'S'
|
3993 |
-
31: 1, # 'T'
|
3994 |
-
51: 0, # 'U'
|
3995 |
-
38: 1, # 'V'
|
3996 |
-
62: 0, # 'W'
|
3997 |
-
43: 2, # 'Y'
|
3998 |
-
56: 1, # 'Z'
|
3999 |
-
1: 0, # 'a'
|
4000 |
-
21: 2, # 'b'
|
4001 |
-
28: 0, # 'c'
|
4002 |
-
12: 2, # 'd'
|
4003 |
-
2: 0, # 'e'
|
4004 |
-
18: 3, # 'f'
|
4005 |
-
27: 0, # 'g'
|
4006 |
-
25: 2, # 'h'
|
4007 |
-
3: 3, # 'i'
|
4008 |
-
24: 2, # 'j'
|
4009 |
-
10: 1, # 'k'
|
4010 |
-
5: 0, # 'l'
|
4011 |
-
13: 1, # 'm'
|
4012 |
-
4: 3, # 'n'
|
4013 |
-
15: 2, # 'o'
|
4014 |
-
26: 0, # 'p'
|
4015 |
-
7: 3, # 'r'
|
4016 |
-
8: 2, # 's'
|
4017 |
-
9: 2, # 't'
|
4018 |
-
14: 1, # 'u'
|
4019 |
-
32: 3, # 'v'
|
4020 |
-
57: 0, # 'w'
|
4021 |
-
58: 0, # 'x'
|
4022 |
-
11: 2, # 'y'
|
4023 |
-
22: 0, # 'z'
|
4024 |
-
63: 0, # '·'
|
4025 |
-
54: 0, # 'Ç'
|
4026 |
-
50: 0, # 'Ö'
|
4027 |
-
55: 1, # 'Ü'
|
4028 |
-
59: 0, # 'â'
|
4029 |
-
33: 0, # 'ç'
|
4030 |
-
61: 0, # 'î'
|
4031 |
-
34: 2, # 'ö'
|
4032 |
-
17: 1, # 'ü'
|
4033 |
-
30: 2, # 'ğ'
|
4034 |
-
41: 0, # 'İ'
|
4035 |
-
6: 2, # 'ı'
|
4036 |
-
40: 1, # 'Ş'
|
4037 |
-
19: 2, # 'ş'
|
4038 |
-
},
|
4039 |
-
19: { # 'ş'
|
4040 |
-
23: 0, # 'A'
|
4041 |
-
37: 0, # 'B'
|
4042 |
-
47: 1, # 'C'
|
4043 |
-
39: 0, # 'D'
|
4044 |
-
29: 0, # 'E'
|
4045 |
-
52: 2, # 'F'
|
4046 |
-
36: 1, # 'G'
|
4047 |
-
45: 0, # 'H'
|
4048 |
-
53: 0, # 'I'
|
4049 |
-
60: 0, # 'J'
|
4050 |
-
16: 3, # 'K'
|
4051 |
-
49: 2, # 'L'
|
4052 |
-
20: 0, # 'M'
|
4053 |
-
46: 1, # 'N'
|
4054 |
-
42: 1, # 'O'
|
4055 |
-
48: 1, # 'P'
|
4056 |
-
44: 1, # 'R'
|
4057 |
-
35: 1, # 'S'
|
4058 |
-
31: 0, # 'T'
|
4059 |
-
51: 1, # 'U'
|
4060 |
-
38: 1, # 'V'
|
4061 |
-
62: 0, # 'W'
|
4062 |
-
43: 1, # 'Y'
|
4063 |
-
56: 0, # 'Z'
|
4064 |
-
1: 3, # 'a'
|
4065 |
-
21: 1, # 'b'
|
4066 |
-
28: 2, # 'c'
|
4067 |
-
12: 0, # 'd'
|
4068 |
-
2: 3, # 'e'
|
4069 |
-
18: 0, # 'f'
|
4070 |
-
27: 2, # 'g'
|
4071 |
-
25: 1, # 'h'
|
4072 |
-
3: 1, # 'i'
|
4073 |
-
24: 0, # 'j'
|
4074 |
-
10: 2, # 'k'
|
4075 |
-
5: 2, # 'l'
|
4076 |
-
13: 3, # 'm'
|
4077 |
-
4: 0, # 'n'
|
4078 |
-
15: 0, # 'o'
|
4079 |
-
26: 1, # 'p'
|
4080 |
-
7: 3, # 'r'
|
4081 |
-
8: 0, # 's'
|
4082 |
-
9: 0, # 't'
|
4083 |
-
14: 3, # 'u'
|
4084 |
-
32: 0, # 'v'
|
4085 |
-
57: 0, # 'w'
|
4086 |
-
58: 0, # 'x'
|
4087 |
-
11: 0, # 'y'
|
4088 |
-
22: 2, # 'z'
|
4089 |
-
63: 0, # '·'
|
4090 |
-
54: 1, # 'Ç'
|
4091 |
-
50: 2, # 'Ö'
|
4092 |
-
55: 0, # 'Ü'
|
4093 |
-
59: 0, # 'â'
|
4094 |
-
33: 1, # 'ç'
|
4095 |
-
61: 1, # 'î'
|
4096 |
-
34: 2, # 'ö'
|
4097 |
-
17: 0, # 'ü'
|
4098 |
-
30: 1, # 'ğ'
|
4099 |
-
41: 1, # 'İ'
|
4100 |
-
6: 1, # 'ı'
|
4101 |
-
40: 1, # 'Ş'
|
4102 |
-
19: 1, # 'ş'
|
4103 |
-
},
|
4104 |
-
}
|
4105 |
-
|
4106 |
-
# 255: Undefined characters that did not exist in training text
|
4107 |
-
# 254: Carriage/Return
|
4108 |
-
# 253: symbol (punctuation) that does not belong to word
|
4109 |
-
# 252: 0 - 9
|
4110 |
-
# 251: Control characters
|
4111 |
-
|
4112 |
-
# Character Mapping Table(s):
|
4113 |
-
ISO_8859_9_TURKISH_CHAR_TO_ORDER = {
|
4114 |
-
0: 255, # '\x00'
|
4115 |
-
1: 255, # '\x01'
|
4116 |
-
2: 255, # '\x02'
|
4117 |
-
3: 255, # '\x03'
|
4118 |
-
4: 255, # '\x04'
|
4119 |
-
5: 255, # '\x05'
|
4120 |
-
6: 255, # '\x06'
|
4121 |
-
7: 255, # '\x07'
|
4122 |
-
8: 255, # '\x08'
|
4123 |
-
9: 255, # '\t'
|
4124 |
-
10: 255, # '\n'
|
4125 |
-
11: 255, # '\x0b'
|
4126 |
-
12: 255, # '\x0c'
|
4127 |
-
13: 255, # '\r'
|
4128 |
-
14: 255, # '\x0e'
|
4129 |
-
15: 255, # '\x0f'
|
4130 |
-
16: 255, # '\x10'
|
4131 |
-
17: 255, # '\x11'
|
4132 |
-
18: 255, # '\x12'
|
4133 |
-
19: 255, # '\x13'
|
4134 |
-
20: 255, # '\x14'
|
4135 |
-
21: 255, # '\x15'
|
4136 |
-
22: 255, # '\x16'
|
4137 |
-
23: 255, # '\x17'
|
4138 |
-
24: 255, # '\x18'
|
4139 |
-
25: 255, # '\x19'
|
4140 |
-
26: 255, # '\x1a'
|
4141 |
-
27: 255, # '\x1b'
|
4142 |
-
28: 255, # '\x1c'
|
4143 |
-
29: 255, # '\x1d'
|
4144 |
-
30: 255, # '\x1e'
|
4145 |
-
31: 255, # '\x1f'
|
4146 |
-
32: 255, # ' '
|
4147 |
-
33: 255, # '!'
|
4148 |
-
34: 255, # '"'
|
4149 |
-
35: 255, # '#'
|
4150 |
-
36: 255, # '$'
|
4151 |
-
37: 255, # '%'
|
4152 |
-
38: 255, # '&'
|
4153 |
-
39: 255, # "'"
|
4154 |
-
40: 255, # '('
|
4155 |
-
41: 255, # ')'
|
4156 |
-
42: 255, # '*'
|
4157 |
-
43: 255, # '+'
|
4158 |
-
44: 255, # ','
|
4159 |
-
45: 255, # '-'
|
4160 |
-
46: 255, # '.'
|
4161 |
-
47: 255, # '/'
|
4162 |
-
48: 255, # '0'
|
4163 |
-
49: 255, # '1'
|
4164 |
-
50: 255, # '2'
|
4165 |
-
51: 255, # '3'
|
4166 |
-
52: 255, # '4'
|
4167 |
-
53: 255, # '5'
|
4168 |
-
54: 255, # '6'
|
4169 |
-
55: 255, # '7'
|
4170 |
-
56: 255, # '8'
|
4171 |
-
57: 255, # '9'
|
4172 |
-
58: 255, # ':'
|
4173 |
-
59: 255, # ';'
|
4174 |
-
60: 255, # '<'
|
4175 |
-
61: 255, # '='
|
4176 |
-
62: 255, # '>'
|
4177 |
-
63: 255, # '?'
|
4178 |
-
64: 255, # '@'
|
4179 |
-
65: 23, # 'A'
|
4180 |
-
66: 37, # 'B'
|
4181 |
-
67: 47, # 'C'
|
4182 |
-
68: 39, # 'D'
|
4183 |
-
69: 29, # 'E'
|
4184 |
-
70: 52, # 'F'
|
4185 |
-
71: 36, # 'G'
|
4186 |
-
72: 45, # 'H'
|
4187 |
-
73: 53, # 'I'
|
4188 |
-
74: 60, # 'J'
|
4189 |
-
75: 16, # 'K'
|
4190 |
-
76: 49, # 'L'
|
4191 |
-
77: 20, # 'M'
|
4192 |
-
78: 46, # 'N'
|
4193 |
-
79: 42, # 'O'
|
4194 |
-
80: 48, # 'P'
|
4195 |
-
81: 69, # 'Q'
|
4196 |
-
82: 44, # 'R'
|
4197 |
-
83: 35, # 'S'
|
4198 |
-
84: 31, # 'T'
|
4199 |
-
85: 51, # 'U'
|
4200 |
-
86: 38, # 'V'
|
4201 |
-
87: 62, # 'W'
|
4202 |
-
88: 65, # 'X'
|
4203 |
-
89: 43, # 'Y'
|
4204 |
-
90: 56, # 'Z'
|
4205 |
-
91: 255, # '['
|
4206 |
-
92: 255, # '\\'
|
4207 |
-
93: 255, # ']'
|
4208 |
-
94: 255, # '^'
|
4209 |
-
95: 255, # '_'
|
4210 |
-
96: 255, # '`'
|
4211 |
-
97: 1, # 'a'
|
4212 |
-
98: 21, # 'b'
|
4213 |
-
99: 28, # 'c'
|
4214 |
-
100: 12, # 'd'
|
4215 |
-
101: 2, # 'e'
|
4216 |
-
102: 18, # 'f'
|
4217 |
-
103: 27, # 'g'
|
4218 |
-
104: 25, # 'h'
|
4219 |
-
105: 3, # 'i'
|
4220 |
-
106: 24, # 'j'
|
4221 |
-
107: 10, # 'k'
|
4222 |
-
108: 5, # 'l'
|
4223 |
-
109: 13, # 'm'
|
4224 |
-
110: 4, # 'n'
|
4225 |
-
111: 15, # 'o'
|
4226 |
-
112: 26, # 'p'
|
4227 |
-
113: 64, # 'q'
|
4228 |
-
114: 7, # 'r'
|
4229 |
-
115: 8, # 's'
|
4230 |
-
116: 9, # 't'
|
4231 |
-
117: 14, # 'u'
|
4232 |
-
118: 32, # 'v'
|
4233 |
-
119: 57, # 'w'
|
4234 |
-
120: 58, # 'x'
|
4235 |
-
121: 11, # 'y'
|
4236 |
-
122: 22, # 'z'
|
4237 |
-
123: 255, # '{'
|
4238 |
-
124: 255, # '|'
|
4239 |
-
125: 255, # '}'
|
4240 |
-
126: 255, # '~'
|
4241 |
-
127: 255, # '\x7f'
|
4242 |
-
128: 180, # '\x80'
|
4243 |
-
129: 179, # '\x81'
|
4244 |
-
130: 178, # '\x82'
|
4245 |
-
131: 177, # '\x83'
|
4246 |
-
132: 176, # '\x84'
|
4247 |
-
133: 175, # '\x85'
|
4248 |
-
134: 174, # '\x86'
|
4249 |
-
135: 173, # '\x87'
|
4250 |
-
136: 172, # '\x88'
|
4251 |
-
137: 171, # '\x89'
|
4252 |
-
138: 170, # '\x8a'
|
4253 |
-
139: 169, # '\x8b'
|
4254 |
-
140: 168, # '\x8c'
|
4255 |
-
141: 167, # '\x8d'
|
4256 |
-
142: 166, # '\x8e'
|
4257 |
-
143: 165, # '\x8f'
|
4258 |
-
144: 164, # '\x90'
|
4259 |
-
145: 163, # '\x91'
|
4260 |
-
146: 162, # '\x92'
|
4261 |
-
147: 161, # '\x93'
|
4262 |
-
148: 160, # '\x94'
|
4263 |
-
149: 159, # '\x95'
|
4264 |
-
150: 101, # '\x96'
|
4265 |
-
151: 158, # '\x97'
|
4266 |
-
152: 157, # '\x98'
|
4267 |
-
153: 156, # '\x99'
|
4268 |
-
154: 155, # '\x9a'
|
4269 |
-
155: 154, # '\x9b'
|
4270 |
-
156: 153, # '\x9c'
|
4271 |
-
157: 152, # '\x9d'
|
4272 |
-
158: 151, # '\x9e'
|
4273 |
-
159: 106, # '\x9f'
|
4274 |
-
160: 150, # '\xa0'
|
4275 |
-
161: 149, # '¡'
|
4276 |
-
162: 148, # '¢'
|
4277 |
-
163: 147, # '£'
|
4278 |
-
164: 146, # '¤'
|
4279 |
-
165: 145, # '¥'
|
4280 |
-
166: 144, # '¦'
|
4281 |
-
167: 100, # '§'
|
4282 |
-
168: 143, # '¨'
|
4283 |
-
169: 142, # '©'
|
4284 |
-
170: 141, # 'ª'
|
4285 |
-
171: 140, # '«'
|
4286 |
-
172: 139, # '¬'
|
4287 |
-
173: 138, # '\xad'
|
4288 |
-
174: 137, # '®'
|
4289 |
-
175: 136, # '¯'
|
4290 |
-
176: 94, # '°'
|
4291 |
-
177: 80, # '±'
|
4292 |
-
178: 93, # '²'
|
4293 |
-
179: 135, # '³'
|
4294 |
-
180: 105, # '´'
|
4295 |
-
181: 134, # 'µ'
|
4296 |
-
182: 133, # '¶'
|
4297 |
-
183: 63, # '·'
|
4298 |
-
184: 132, # '¸'
|
4299 |
-
185: 131, # '¹'
|
4300 |
-
186: 130, # 'º'
|
4301 |
-
187: 129, # '»'
|
4302 |
-
188: 128, # '¼'
|
4303 |
-
189: 127, # '½'
|
4304 |
-
190: 126, # '¾'
|
4305 |
-
191: 125, # '¿'
|
4306 |
-
192: 124, # 'À'
|
4307 |
-
193: 104, # 'Á'
|
4308 |
-
194: 73, # 'Â'
|
4309 |
-
195: 99, # 'Ã'
|
4310 |
-
196: 79, # 'Ä'
|
4311 |
-
197: 85, # 'Å'
|
4312 |
-
198: 123, # 'Æ'
|
4313 |
-
199: 54, # 'Ç'
|
4314 |
-
200: 122, # 'È'
|
4315 |
-
201: 98, # 'É'
|
4316 |
-
202: 92, # 'Ê'
|
4317 |
-
203: 121, # 'Ë'
|
4318 |
-
204: 120, # 'Ì'
|
4319 |
-
205: 91, # 'Í'
|
4320 |
-
206: 103, # 'Î'
|
4321 |
-
207: 119, # 'Ï'
|
4322 |
-
208: 68, # 'Ğ'
|
4323 |
-
209: 118, # 'Ñ'
|
4324 |
-
210: 117, # 'Ò'
|
4325 |
-
211: 97, # 'Ó'
|
4326 |
-
212: 116, # 'Ô'
|
4327 |
-
213: 115, # 'Õ'
|
4328 |
-
214: 50, # 'Ö'
|
4329 |
-
215: 90, # '×'
|
4330 |
-
216: 114, # 'Ø'
|
4331 |
-
217: 113, # 'Ù'
|
4332 |
-
218: 112, # 'Ú'
|
4333 |
-
219: 111, # 'Û'
|
4334 |
-
220: 55, # 'Ü'
|
4335 |
-
221: 41, # 'İ'
|
4336 |
-
222: 40, # 'Ş'
|
4337 |
-
223: 86, # 'ß'
|
4338 |
-
224: 89, # 'à'
|
4339 |
-
225: 70, # 'á'
|
4340 |
-
226: 59, # 'â'
|
4341 |
-
227: 78, # 'ã'
|
4342 |
-
228: 71, # 'ä'
|
4343 |
-
229: 82, # 'å'
|
4344 |
-
230: 88, # 'æ'
|
4345 |
-
231: 33, # 'ç'
|
4346 |
-
232: 77, # 'è'
|
4347 |
-
233: 66, # 'é'
|
4348 |
-
234: 84, # 'ê'
|
4349 |
-
235: 83, # 'ë'
|
4350 |
-
236: 110, # 'ì'
|
4351 |
-
237: 75, # 'í'
|
4352 |
-
238: 61, # 'î'
|
4353 |
-
239: 96, # 'ï'
|
4354 |
-
240: 30, # 'ğ'
|
4355 |
-
241: 67, # 'ñ'
|
4356 |
-
242: 109, # 'ò'
|
4357 |
-
243: 74, # 'ó'
|
4358 |
-
244: 87, # 'ô'
|
4359 |
-
245: 102, # 'õ'
|
4360 |
-
246: 34, # 'ö'
|
4361 |
-
247: 95, # '÷'
|
4362 |
-
248: 81, # 'ø'
|
4363 |
-
249: 108, # 'ù'
|
4364 |
-
250: 76, # 'ú'
|
4365 |
-
251: 72, # 'û'
|
4366 |
-
252: 17, # 'ü'
|
4367 |
-
253: 6, # 'ı'
|
4368 |
-
254: 19, # 'ş'
|
4369 |
-
255: 107, # 'ÿ'
|
4370 |
-
}
|
4371 |
-
|
4372 |
-
ISO_8859_9_TURKISH_MODEL = SingleByteCharSetModel(
|
4373 |
-
charset_name="ISO-8859-9",
|
4374 |
-
language="Turkish",
|
4375 |
-
char_to_order_map=ISO_8859_9_TURKISH_CHAR_TO_ORDER,
|
4376 |
-
language_model=TURKISH_LANG_MODEL,
|
4377 |
-
typical_positive_ratio=0.97029,
|
4378 |
-
keep_ascii_letters=True,
|
4379 |
-
alphabet="ABCDEFGHIJKLMNOPRSTUVYZabcdefghijklmnoprstuvyzÂÇÎÖÛÜâçîöûüĞğİıŞş",
|
4380 |
-
)
|
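For context, a minimal sketch (not part of the deleted file above) of how a single-byte model such as ISO_8859_9_TURKISH_MODEL is exercised through chardet's public detect() API; the Turkish sample sentence is an assumption, and short inputs may be reported with low confidence or as a different single-byte charset.

# Hedged usage sketch; the sample text is illustrative only.
import chardet

sample = "Şu cümlede ğ, ı, ş, ç, ö ve ü gibi Türkçe karakterler geçiyor.".encode("iso-8859-9")
result = chardet.detect(sample)
print(result)  # dict with 'encoding', 'confidence' and 'language' keys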
spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_path.py
DELETED
@@ -1,29 +0,0 @@
import os
from typing import Union

_Path = Union[str, os.PathLike]


def ensure_directory(path):
    """Ensure that the parent directory of `path` exists"""
    dirname = os.path.dirname(path)
    os.makedirs(dirname, exist_ok=True)


def same_path(p1: _Path, p2: _Path) -> bool:
    """Differs from os.path.samefile because it does not require paths to exist.
    Purely string based (no comparison between i-nodes).
    >>> same_path("a/b", "./a/b")
    True
    >>> same_path("a/b", "a/./b")
    True
    >>> same_path("a/b", "././a/b")
    True
    >>> same_path("a/b", "./a/b/c/..")
    True
    >>> same_path("a/b", "../a/b/c")
    False
    >>> same_path("a", "a/b")
    False
    """
    return os.path.normpath(p1) == os.path.normpath(p2)
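A brief usage sketch for the two helpers above (not part of the original file); the paths are made up, and it assumes the module is importable as setuptools._path.

# Hedged usage sketch; directory and file names are illustrative only.
import os
from setuptools._path import ensure_directory, same_path

ensure_directory(os.path.join("build", "artifacts", "report.txt"))  # creates build/artifacts/
assert same_path("build/./artifacts", "build/artifacts")  # purely string-based, paths need not exist
assert not same_path("build", "build/artifacts")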
spaces/BraydenMoore/MARCI-NFL-Betting/Source/Test/__init__.py
DELETED
File without changes
spaces/CVPR/Dual-Key_Backdoor_Attacks/datagen/detectron2/docker/Dockerfile
DELETED
@@ -1,43 +0,0 @@
FROM nvidia/cuda:10.1-cudnn7-devel

ENV DEBIAN_FRONTEND noninteractive
RUN apt-get update && apt-get install -y \
	python3-opencv ca-certificates python3-dev git wget sudo && \
	rm -rf /var/lib/apt/lists/*

# create a non-root user
ARG USER_ID=1000
RUN useradd -m --no-log-init --system --uid ${USER_ID} appuser -g sudo
RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
USER appuser
WORKDIR /home/appuser

ENV PATH="/home/appuser/.local/bin:${PATH}"
RUN wget https://bootstrap.pypa.io/get-pip.py && \
	python3 get-pip.py --user && \
	rm get-pip.py

# install dependencies
# See https://pytorch.org/ for other options if you use a different version of CUDA
RUN pip install --user torch torchvision tensorboard cython
RUN pip install --user 'git+https://github.com/cocodataset/cocoapi.git#subdirectory=PythonAPI'

RUN pip install --user 'git+https://github.com/facebookresearch/fvcore'
# install detectron2
RUN git clone https://github.com/facebookresearch/detectron2 detectron2_repo
ENV FORCE_CUDA="1"
# This will build detectron2 for all common cuda architectures and take a lot more time,
# because inside `docker build`, there is no way to tell which architecture will be used.
ENV TORCH_CUDA_ARCH_LIST="Kepler;Kepler+Tesla;Maxwell;Maxwell+Tegra;Pascal;Volta;Turing"
RUN pip install --user -e detectron2_repo

# Set a fixed model cache directory.
ENV FVCORE_CACHE="/tmp"
WORKDIR /home/appuser/detectron2_repo

# run it, for example:
# wget http://images.cocodataset.org/val2017/000000439715.jpg -O input.jpg
# python3 demo/demo.py \
#	--config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \
#	--input input.jpg --output outputs/ \
#	--opts MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl
spaces/CVPR/LIVE/thrust/thrust/detail/util/align.h
DELETED
@@ -1,59 +0,0 @@
/*
 * Copyright 2008-2013 NVIDIA Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#pragma once

#include <thrust/detail/cstdint.h>

// functions to handle memory alignment

namespace thrust
{
namespace detail
{
namespace util
{


template<typename T>
__host__ __device__
T *align_up(T * ptr, detail::uintptr_t bytes)
{
  return (T *) ( bytes * (((detail::uintptr_t) ptr + (bytes - 1)) / bytes) );
}


template<typename T>
__host__ __device__
T *align_down(T * ptr, detail::uintptr_t bytes)
{
  return (T *) ( bytes * (detail::uintptr_t(ptr) / bytes) );
}


template<typename T>
__host__ __device__
bool is_aligned(T * ptr, detail::uintptr_t bytes = sizeof(T))
{
  return detail::uintptr_t(ptr) % bytes == 0;
}


} // end namespace util
} // end namespace detail
} // end namespace thrust
spaces/CVPR/LIVE/thrust/thrust/generate.h
DELETED
@@ -1,213 +0,0 @@
/*
 * Copyright 2008-2013 NVIDIA Corporation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * [... standard Apache-2.0 license boilerplate elided ...]
 */

/*! \file generate.h
 *  \brief Fills a range with values "generated" from a function of no arguments
 */

#pragma once

#include <thrust/detail/config.h>
#include <thrust/detail/execution_policy.h>

namespace thrust
{

/*! \addtogroup transformations
 *  \{
 */

/*! \p generate assigns the result of invoking \p gen, a function object that takes no
 *  arguments, to each element in the range <tt>[first,last)</tt>. The first overload is
 *  parallelized as determined by \p exec.
 *  [... Doxygen \param/\tparam documentation and the host_vector/rand example snippets elided ...]
 *  \see generate_n
 *  \see http://www.sgi.com/tech/stl/generate.html
 */
template<typename DerivedPolicy,
         typename ForwardIterator,
         typename Generator>
__host__ __device__
  void generate(const thrust::detail::execution_policy_base<DerivedPolicy> &exec,
                ForwardIterator first,
                ForwardIterator last,
                Generator gen);


template<typename ForwardIterator,
         typename Generator>
  void generate(ForwardIterator first,
                ForwardIterator last,
                Generator gen);


/*! \p generate_n assigns the result of invoking \p gen, a function object that takes no
 *  arguments, to each element in the range <tt>[first,first + n)</tt>. The return value
 *  is <tt>first + n</tt>. The first overload is parallelized as determined by \p exec.
 *  [... Doxygen \param/\tparam documentation and the host_vector/rand example snippets elided ...]
 *  \see generate
 *  \see http://www.sgi.com/tech/stl/generate.html
 */
template<typename DerivedPolicy,
         typename OutputIterator,
         typename Size,
         typename Generator>
__host__ __device__
  OutputIterator generate_n(const thrust::detail::execution_policy_base<DerivedPolicy> &exec,
                            OutputIterator first,
                            Size n,
                            Generator gen);


template<typename OutputIterator,
         typename Size,
         typename Generator>
  OutputIterator generate_n(OutputIterator first,
                            Size n,
                            Generator gen);


/*! \} // end transformations
 */

} // end namespace thrust

#include <thrust/detail/generate.inl>
spaces/CVPR/WALT/mmdet/core/bbox/match_costs/match_cost.py
DELETED
@@ -1,184 +0,0 @@
import torch

from mmdet.core.bbox.iou_calculators import bbox_overlaps
from mmdet.core.bbox.transforms import bbox_cxcywh_to_xyxy, bbox_xyxy_to_cxcywh
from .builder import MATCH_COST


@MATCH_COST.register_module()
class BBoxL1Cost(object):
    """BBoxL1Cost.

    Args:
        weight (int | float, optional): loss_weight
        box_format (str, optional): 'xyxy' for DETR, 'xywh' for Sparse_RCNN

    Examples:
        >>> from mmdet.core.bbox.match_costs.match_cost import BBoxL1Cost
        >>> import torch
        >>> self = BBoxL1Cost()
        >>> bbox_pred = torch.rand(1, 4)
        >>> gt_bboxes = torch.FloatTensor([[0, 0, 2, 4], [1, 2, 3, 4]])
        >>> factor = torch.tensor([10, 8, 10, 8])
        >>> self(bbox_pred, gt_bboxes, factor)
        tensor([[1.6172, 1.6422]])
    """

    def __init__(self, weight=1., box_format='xyxy'):
        self.weight = weight
        assert box_format in ['xyxy', 'xywh']
        self.box_format = box_format

    def __call__(self, bbox_pred, gt_bboxes):
        """
        Args:
            bbox_pred (Tensor): Predicted boxes with normalized coordinates
                (cx, cy, w, h), which are all in range [0, 1]. Shape
                [num_query, 4].
            gt_bboxes (Tensor): Ground truth boxes with normalized
                coordinates (x1, y1, x2, y2). Shape [num_gt, 4].

        Returns:
            torch.Tensor: bbox_cost value with weight
        """
        if self.box_format == 'xywh':
            gt_bboxes = bbox_xyxy_to_cxcywh(gt_bboxes)
        elif self.box_format == 'xyxy':
            bbox_pred = bbox_cxcywh_to_xyxy(bbox_pred)
        bbox_cost = torch.cdist(bbox_pred, gt_bboxes, p=1)
        return bbox_cost * self.weight


@MATCH_COST.register_module()
class FocalLossCost(object):
    """FocalLossCost.

    Args:
        weight (int | float, optional): loss_weight
        alpha (int | float, optional): focal_loss alpha
        gamma (int | float, optional): focal_loss gamma
        eps (float, optional): default 1e-12

    Examples:
        >>> from mmdet.core.bbox.match_costs.match_cost import FocalLossCost
        >>> import torch
        >>> self = FocalLossCost()
        >>> cls_pred = torch.rand(4, 3)
        >>> gt_labels = torch.tensor([0, 1, 2])
        >>> factor = torch.tensor([10, 8, 10, 8])
        >>> self(cls_pred, gt_labels)
        tensor([[-0.3236, -0.3364, -0.2699],
                [-0.3439, -0.3209, -0.4807],
                [-0.4099, -0.3795, -0.2929],
                [-0.1950, -0.1207, -0.2626]])
    """

    def __init__(self, weight=1., alpha=0.25, gamma=2, eps=1e-12):
        self.weight = weight
        self.alpha = alpha
        self.gamma = gamma
        self.eps = eps

    def __call__(self, cls_pred, gt_labels):
        """
        Args:
            cls_pred (Tensor): Predicted classification logits, shape
                [num_query, num_class].
            gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,).

        Returns:
            torch.Tensor: cls_cost value with weight
        """
        cls_pred = cls_pred.sigmoid()
        neg_cost = -(1 - cls_pred + self.eps).log() * (
            1 - self.alpha) * cls_pred.pow(self.gamma)
        pos_cost = -(cls_pred + self.eps).log() * self.alpha * (
            1 - cls_pred).pow(self.gamma)
        cls_cost = pos_cost[:, gt_labels] - neg_cost[:, gt_labels]
        return cls_cost * self.weight


@MATCH_COST.register_module()
class ClassificationCost(object):
    """ClsSoftmaxCost.

    Args:
        weight (int | float, optional): loss_weight

    Examples:
        >>> from mmdet.core.bbox.match_costs.match_cost import \
        ... ClassificationCost
        >>> import torch
        >>> self = ClassificationCost()
        >>> cls_pred = torch.rand(4, 3)
        >>> gt_labels = torch.tensor([0, 1, 2])
        >>> factor = torch.tensor([10, 8, 10, 8])
        >>> self(cls_pred, gt_labels)
        tensor([[-0.3430, -0.3525, -0.3045],
                [-0.3077, -0.2931, -0.3992],
                [-0.3664, -0.3455, -0.2881],
                [-0.3343, -0.2701, -0.3956]])
    """

    def __init__(self, weight=1.):
        self.weight = weight

    def __call__(self, cls_pred, gt_labels):
        """
        Args:
            cls_pred (Tensor): Predicted classification logits, shape
                [num_query, num_class].
            gt_labels (Tensor): Label of `gt_bboxes`, shape (num_gt,).

        Returns:
            torch.Tensor: cls_cost value with weight
        """
        # Following the official DETR repo, instead of the NLL loss we
        # approximate it with 1 - cls_score[gt_label]. The constant 1
        # does not change the matching, so it is omitted.
        cls_score = cls_pred.softmax(-1)
        cls_cost = -cls_score[:, gt_labels]
        return cls_cost * self.weight


@MATCH_COST.register_module()
class IoUCost(object):
    """IoUCost.

    Args:
        iou_mode (str, optional): iou mode such as 'iou' | 'giou'
        weight (int | float, optional): loss weight

    Examples:
        >>> from mmdet.core.bbox.match_costs.match_cost import IoUCost
        >>> import torch
        >>> self = IoUCost()
        >>> bboxes = torch.FloatTensor([[1, 1, 2, 2], [2, 2, 3, 4]])
        >>> gt_bboxes = torch.FloatTensor([[0, 0, 2, 4], [1, 2, 3, 4]])
        >>> self(bboxes, gt_bboxes)
        tensor([[-0.1250, 0.1667],
                [ 0.1667, -0.5000]])
    """

    def __init__(self, iou_mode='giou', weight=1.):
        self.weight = weight
        self.iou_mode = iou_mode

    def __call__(self, bboxes, gt_bboxes):
        """
        Args:
            bboxes (Tensor): Predicted boxes with unnormalized coordinates
                (x1, y1, x2, y2). Shape [num_query, 4].
            gt_bboxes (Tensor): Ground truth boxes with unnormalized
                coordinates (x1, y1, x2, y2). Shape [num_gt, 4].

        Returns:
            torch.Tensor: iou_cost value with weight
        """
        # overlaps: [num_bboxes, num_gt]
        overlaps = bbox_overlaps(
            bboxes, gt_bboxes, mode=self.iou_mode, is_aligned=False)
        # The 1 is a constant that doesn't change the matching, so omitted.
        iou_cost = -overlaps
        return iou_cost * self.weight
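
Note: the classes above only build cost terms; the actual one-to-one matching is performed elsewhere (typically by an assigner such as mmdet's HungarianAssigner). As a rough illustration of how the terms combine, here is a minimal Python sketch that is not taken from this repository: the 2.0/5.0/2.0 weights, the tensor shapes, and the use of scipy's linear_sum_assignment are illustrative assumptions only.

import torch
from scipy.optimize import linear_sum_assignment  # assumed available; stands in for the real assigner

# Hypothetical toy inputs: 5 queries, 3 ground-truth boxes, 4 classes.
cls_pred = torch.rand(5, 4)                       # raw classification logits
bbox_pred = torch.rand(5, 4) * 0.5                # normalized (cx, cy, w, h) in [0, 1]
gt_labels = torch.tensor([0, 2, 3])
gt_bboxes = torch.tensor([[0., 0., 2., 4.],       # unnormalized (x1, y1, x2, y2)
                          [1., 2., 3., 4.],
                          [2., 1., 5., 6.]])
factor = torch.tensor([10., 8., 10., 8.])         # image (w, h, w, h) used for (de)normalization

# Each term follows the input conventions documented in the classes above.
cls_cost = FocalLossCost(weight=2.0)(cls_pred, gt_labels)
reg_cost = BBoxL1Cost(weight=5.0)(bbox_pred, gt_bboxes / factor)
iou_cost = IoUCost(iou_mode='giou', weight=2.0)(
    bbox_cxcywh_to_xyxy(bbox_pred) * factor, gt_bboxes)

cost = (cls_cost + reg_cost + iou_cost).detach().cpu().numpy()
rows, cols = linear_sum_assignment(cost)          # query rows[i] is matched to gt cols[i]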
spaces/CofAI/urlcut/index.html
DELETED
@@ -1,49 +0,0 @@
<!DOCTYPE html>
<html>
<head>
  <title>CofURL.cut</title>
  <p></p>
</head>
<body>
  <h1>CofURLcut</h1>
  <label for="long-url">Enter a long link:</label>
  <p></p>
  <input type="text" id="long-url" placeholder="https://cofai-urlcut.hf.space">
  <p></p>
  <button id="shorten-button">Shorten</button>
  <br>
  <p></p>
  <label for="short-url">Short link:</label>
  <p></p>
  <input type="text" id="short-url" readonly>
  <p></p>
  <script>
    document.getElementById("shorten-button").addEventListener("click", function() {
      var longUrl = document.getElementById("long-url").value;

      // Call the Bit.ly API to shorten the link
      fetch("https://api-ssl.bitly.com/v4/shorten", {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          "Authorization": "326c6f40970f20b2e71669323f7849575789630b"
        },
        body: JSON.stringify({
          long_url: longUrl
        })
      })
      .then(response => response.json())
      .then(data => {
        var shortUrl = data.link;
        document.getElementById("short-url").value = shortUrl;
      })
      .catch(error => {
        console.error(error);
        document.getElementById("short-url").value = "Error while shortening the link.";
      });
    });
  </script>
  <p></p>
  Link shortener by CofAI
</body>
</html>
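
For reference, the same Bit.ly v4 call can be reproduced outside the browser. The sketch below is an illustration under stated assumptions, not part of the deleted page: it presumes the requests library is installed, that YOUR_BITLY_TOKEN is a hypothetical valid token, and that the API expects it as a Bearer credential (the page above sends the bare token).

import requests  # assumed dependency

def shorten(long_url: str, token: str = "YOUR_BITLY_TOKEN") -> str:
    # Same endpoint and payload as the fetch() call in the page above.
    resp = requests.post(
        "https://api-ssl.bitly.com/v4/shorten",
        headers={
            "Content-Type": "application/json",
            "Authorization": f"Bearer {token}",  # assumption: Bearer scheme
        },
        json={"long_url": long_url},
        timeout=10,
    )
    resp.raise_for_status()
    return resp.json()["link"]

# Hypothetical usage: print(shorten("https://cofai-urlcut.hf.space"))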
spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/data/samplers/iteration_based_batch_sampler.py
DELETED
@@ -1,31 +0,0 @@
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
from torch.utils.data.sampler import BatchSampler


class IterationBasedBatchSampler(BatchSampler):
    """
    Wraps a BatchSampler, resampling from it until
    a specified number of iterations have been sampled
    """

    def __init__(self, batch_sampler, num_iterations, start_iter=0):
        self.batch_sampler = batch_sampler
        self.num_iterations = num_iterations
        self.start_iter = start_iter

    def __iter__(self):
        iteration = self.start_iter
        while iteration <= self.num_iterations:
            # if the underlying sampler has a set_epoch method, like
            # DistributedSampler, used for making each process see
            # a different split of the dataset, then set it
            if hasattr(self.batch_sampler.sampler, "set_epoch"):
                self.batch_sampler.sampler.set_epoch(iteration)
            for batch in self.batch_sampler:
                iteration += 1
                if iteration > self.num_iterations:
                    break
                yield batch

    def __len__(self):
        return self.num_iterations
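
A minimal usage sketch (not from this repository): because the class subclasses BatchSampler, it can be handed to a PyTorch DataLoader via batch_sampler, turning an epoch-based dataset into a fixed-iteration stream. The dataset, batch size, and iteration count below are illustrative assumptions.

import torch
from torch.utils.data import BatchSampler, DataLoader, RandomSampler, TensorDataset

dataset = TensorDataset(torch.arange(100, dtype=torch.float32))
batch_sampler = BatchSampler(RandomSampler(dataset), batch_size=8, drop_last=True)

# Keep re-sampling batches until exactly 500 iterations have been produced,
# independent of the dataset length.
iter_sampler = IterationBasedBatchSampler(batch_sampler, num_iterations=500)
loader = DataLoader(dataset, batch_sampler=iter_sampler)

for step, (batch,) in enumerate(loader, start=1):
    pass  # the training step would go here; len(loader) == 500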
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/_binary.py
DELETED
@@ -1,102 +0,0 @@
#
# The Python Imaging Library.
# $Id$
#
# Binary input/output support routines.
#
# Copyright (c) 1997-2003 by Secret Labs AB
# Copyright (c) 1995-2003 by Fredrik Lundh
# Copyright (c) 2012 by Brian Crowell
#
# See the README file for information on usage and redistribution.
#


"""Binary input/output support routines."""


from struct import pack, unpack_from


def i8(c):
    return c if c.__class__ is int else c[0]


def o8(i):
    return bytes((i & 255,))


# Input, le = little endian, be = big endian
def i16le(c, o=0):
    """
    Converts a 2-bytes (16 bits) string to an unsigned integer.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from("<H", c, o)[0]


def si16le(c, o=0):
    """
    Converts a 2-bytes (16 bits) string to a signed integer.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from("<h", c, o)[0]


def si16be(c, o=0):
    """
    Converts a 2-bytes (16 bits) string to a signed integer, big endian.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from(">h", c, o)[0]


def i32le(c, o=0):
    """
    Converts a 4-bytes (32 bits) string to an unsigned integer.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from("<I", c, o)[0]


def si32le(c, o=0):
    """
    Converts a 4-bytes (32 bits) string to a signed integer.

    :param c: string containing bytes to convert
    :param o: offset of bytes to convert in string
    """
    return unpack_from("<i", c, o)[0]


def i16be(c, o=0):
    return unpack_from(">H", c, o)[0]


def i32be(c, o=0):
    return unpack_from(">I", c, o)[0]


# Output, le = little endian, be = big endian
def o16le(i):
    return pack("<H", i)


def o32le(i):
    return pack("<I", i)


def o16be(i):
    return pack(">H", i)


def o32be(i):
    return pack(">I", i)
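
A quick round-trip sanity check for the helpers above (illustrative values only):

# o16le/o32be pack values; i16le/i32be read them back, optionally from an offset.
buf = o16le(0x1234) + o32be(0xDEADBEEF)   # b'\x34\x12\xde\xad\xbe\xef'
assert i16le(buf) == 0x1234
assert i32be(buf, o=2) == 0xDEADBEEF
assert si16le(o16le(0xFFFF)) == -1        # signed reinterpretation of 0xFFFF
assert o8(0x1FF) == b'\xff'               # o8 keeps only the low byte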