parquet-converter commited on
Commit
546e9c9
·
1 Parent(s): 98f41dd

Update parquet files (step 3 of 249)

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Battlefield 2 Patch 1.51 HOT Crack.md +0 -76
  2. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Binding Of Isaac Cheat Table Downloadl The Ultimate Resource for Isaac Fans.md +0 -68
  3. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Grand.theft.auto.v.patch.fix.v1.0.231.0.core.x Whats New in the GTA V Patch Fix v1.0.231.0 Core X.md +0 -104
  4. spaces/1gistliPinn/ChatGPT4/Examples/Baghban 4 full movie download in hd Where to find the high-quality version of the epic story.md +0 -5
  5. spaces/1nferno/Single_Digit_Detection/app.py +0 -19
  6. spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Cara Download Sakura School Simulator Versi Jepang di Android.md +0 -109
  7. spaces/AB-TW/team-ai/documents/bussiness_context/NOTION_DB/Engineering Wiki 2402f5396a3244fdb3f1d135bdb0f3d6/Engineering Interviews 4be8039581d04456b0151f2cc4b22130/Questions ede8818b3a0e447f80145905690eb3f6/Alphabet Ordering a3c46877392e4fff85a9dcf594f4e066.md +0 -41
  8. spaces/ADOPLE/ResumeSummarizer/app.py +0 -92
  9. spaces/AEUPH/CosmosTV/Dockerfile +0 -35
  10. spaces/AIGC-Audio/AudioGPT/NeuralSeq/modules/parallel_wavegan/layers/causal_conv.py +0 -56
  11. spaces/AUST001/ChatGPT/app.py +0 -97
  12. spaces/AUST001/Translation/README.md +0 -13
  13. spaces/Abhilashvj/planogram-compliance/data/scripts/download_weights.sh +0 -22
  14. spaces/AgentVerse/agentVerse/agentverse/environments/tasksolving_env/rules/decision_maker/vertical_solver_first.py +0 -87
  15. spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/gridbuttons/Factory.js +0 -13
  16. spaces/AlexWang/lama/bin/train.py +0 -72
  17. spaces/AlhitawiMohammed22/CER_Hu-Evaluation-Metrics/eval_accuracy.py +0 -0
  18. spaces/Amrrs/DragGan-Inversion/PTI/configs/__init__.py +0 -0
  19. spaces/Amrrs/DragGan-Inversion/torch_utils/__init__.py +0 -9
  20. spaces/Andy1621/uniformer_image_detection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py +0 -79
  21. spaces/Andy1621/uniformer_image_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py +0 -23
  22. spaces/Andy1621/uniformer_image_detection/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py +0 -3
  23. spaces/Anon4review/HIPTDemo/README.md +0 -37
  24. spaces/Anonymous-123/ImageNet-Editing/editing_diffusion/guided_diffusion/datasets/lsun_bedroom.py +0 -54
  25. spaces/Arnx/MusicGenXvAKN/audiocraft/modules/codebooks_patterns.py +0 -539
  26. spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py +0 -126
  27. spaces/Atualli/yoloxTeste/yoloxdetect2/utils/downloads.py +0 -85
  28. spaces/Audio-AGI/AudioSep/app.py +0 -82
  29. spaces/Awiny/Image2Paragraph/models/grit_src/third_party/CenterNet2/detectron2/utils/env.py +0 -170
  30. spaces/BAAI/dreambooth-altdiffusion/model.README.md +0 -24
  31. spaces/Big-Web/MMSD/env/Lib/site-packages/botocore/docs/docstring.py +0 -97
  32. spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/pygments/lexers/_mapping.py +0 -553
  33. spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/requests/exceptions.py +0 -141
  34. spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/resolvelib/__init__.py +0 -26
  35. spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/_cell_widths.py +0 -451
  36. spaces/CVPR/LIVE/thrust/thrust/detail/reference_forward_declaration.h +0 -28
  37. spaces/CVPR/lama-example/saicinpainting/evaluation/losses/fid/__init__.py +0 -0
  38. spaces/CVPR/monoscene_lite/monoscene/unet3d_kitti.py +0 -88
  39. spaces/CVPR/regionclip-demo/detectron2/data/transforms/build.py +0 -89
  40. spaces/ChenWu98/Stable-CycleDiffusion/utils.py +0 -6
  41. spaces/CikeyQI/meme-api/meme_generator/memes/klee_eat/__init__.py +0 -33
  42. spaces/Cong723/gpt-academic-public/crazy_functions/test_project/cpp/cppipc/policy.h +0 -25
  43. spaces/DJQmUKV/rvc-inference/infer_pack/attentions.py +0 -417
  44. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/ImageEnhance.py +0 -103
  45. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/dotenv/version.py +0 -1
  46. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/fontTools/ttLib/tables/S_V_G_.py +0 -215
  47. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/fsspec/asyn.py +0 -1029
  48. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/components/scatter_plot.py +0 -472
  49. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/external_utils.py +0 -140
  50. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/templates/cdn/assets/ColorPicker-5063dbc4.css +0 -1
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Battlefield 2 Patch 1.51 HOT Crack.md DELETED
@@ -1,76 +0,0 @@
1
- <br />
2
- <h1>Battlefield 2 Patch 1.51 Crack: How to Download, Install, and Play</h1>
3
- <p>Battlefield 2 is one of the most popular first-person shooter games ever released. It was launched in 2005 by EA Games and DICE, and it features modern warfare scenarios with realistic weapons, vehicles, and maps. The game has a single-player mode, where you can play against bots, and a multiplayer mode, where you can join online servers and fight with or against other players.</p>
4
- <p>However, to enjoy the full potential of Battlefield 2, you need to update it to the latest version, which is patch 1.51. This patch was released in 2009, and it adds new content, fixes bugs, and improves performance. Some of the new features include two new maps (Operation Blue Pearl and Highway Tampa), two new booster packs (Euro Force and Armored Fury), widescreen support, improved server browser, and more.</p>
5
- <h2>battlefield 2 patch 1.51 crack</h2><br /><p><b><b>DOWNLOAD</b> &#9889; <a href="https://byltly.com/2uKz9h">https://byltly.com/2uKz9h</a></b></p><br /><br />
6
- <p>But what if you don't have a legal copy of Battlefield 2? Or what if you want to play on servers that are not supported by EA Games? In that case, you might need a crack for patch 1.51. A crack is a modified version of the game executable that bypasses the copy protection or the online authentication. With a crack, you can play Battlefield 2 without a CD or DVD, or without an internet connection.</p>
7
- <p>However, using a crack also has some risks and drawbacks. You might encounter compatibility issues, viruses, malware, or legal problems. You might also miss out on some features or updates that are only available for the official version of the game. Therefore, you should use a crack at your own discretion and responsibility.</p>
8
- <p>In this article, we will show you how to download, install, and play Battlefield 2 with patch 1.51 and crack. We will also give you some tips on how to enjoy the game with mods and cheats. Follow the steps below and get ready for some intense action on the battlefield.</p>
9
- <h2>How to install patch 1.51</h2>
10
- <p>Before you can use a crack for patch 1.51, you need to install the patch itself. Here are the requirements and download links for patch 1.51:</p>
11
- <ul>
12
- <li>You need to have Battlefield 2 installed on your PC.</li>
13
- <li>You need to have at least version 1.41 of Battlefield 2.</li>
14
- <li>You need to have about 3 GB of free disk space.</li>
15
- <li>You can download patch 1.41 from [here](^3^).</li>
16
- <li>You can download patch 1.5 from [here](^1^).</li>
17
- </ul>
18
- <p>Once you have downloaded the patches, follow these instructions to install them:</p>
19
- <ol>
20
- <li>Make a backup of your Battlefield 2 folder in case something goes wrong.</li>
21
- <li>Run the patch 1.41 installer and follow the on-screen instructions.</li>
22
- <li>Run the patch 1.5 installer and follow the on-screen instructions.</li>
23
- <li>Launch Battlefield 2 and check if the version number in the bottom right corner says "v1.50".</li>
24
- </ol>
25
- <h2>How to use a crack for patch 1.51</h2>
26
- <p>Now that you have installed patch 1.51, you can use a crack to play Battlefield 2 without any restrictions. However, before you do that, be aware of the risks and warnings:</p>
27
- <p></p>
28
- <ul>
29
- <li>Using a crack might violate the terms of service or the end-user license agreement of EA Games or DICE.</li>
30
- <li>Using a crack might expose your PC to viruses, malware, spyware, or other harmful programs. You should scan the crack files with a reliable antivirus software before using them.</li>
31
- <li>Using a crack might cause compatibility issues, errors, crashes, or performance problems. You should test the crack on a different PC or a virtual machine before using it on your main PC.</li>
32
- <li>Using a crack might prevent you from playing on some online servers that require the official version of the game. You should check the server rules and requirements before joining them.</li>
33
- </ul>
34
- <p>If you are still willing to use a crack for patch 1.51, here are some sources and download links for the crack files:</p>
35
- <ul>
36
- <li>You can download a crack for patch 1.51 from [here].</li>
37
- <li>You can download a crack for patch 1.51 and the booster packs from [here].</li>
38
- <li>You can download a crack for patch 1.51 and the Project Reality mod from [here].</li>
39
- </ul>
40
- <p>Once you have downloaded the crack files, follow these instructions to use them:</p>
41
- <ol>
42
- <li>Extract the crack files to a folder of your choice.</li>
43
- <li>Copy the BF2.exe file from the crack folder and paste it to your Battlefield 2 folder, replacing the original file.</li>
44
- <li>If you have downloaded the booster packs or the Project Reality mod, copy the respective folders from the crack folder and paste them to your Battlefield 2 folder, replacing the original folders.</li>
45
- <li>Launch Battlefield 2 and check if the game works without any problems.</li>
46
- </ol>
47
- <h2>How to play Battlefield 2 with patch 1.51 and crack</h2>
48
- <p>Now that you have installed patch 1.51 and used a crack, you can play Battlefield 2 with more features and options. Here are some tips on how to play the game with patch 1.51 and crack:</p>
49
- <h3>Single-player mode</h3>
50
- <p>In single-player mode, you can play against bots on any map and mode that you want. You can also customize the number and difficulty of the bots, as well as other settings such as friendly fire, respawn time, or ticket ratio. To do that, you need to edit the AI files in your Battlefield 2 folder. You can find detailed instructions on how to do that [here].</p>
51
- <h3>Multiplayer mode</h3>
52
- <p>In multiplayer mode, you can join online servers and play with or against other players. However, not all servers will accept your cracked version of the game. Some servers might require the official version of the game, or a specific mod or patch. To find servers that match your version of the game, you can use a server browser such as [GameRanger] or [Battlelog.co]. These server browsers will show you the server name, map, mode, players, ping, and other information. You can also filter the servers by region, game type, password, or mod.</p>
53
- <h3>Mods and cheats</h3>
54
- <p>If you want to enhance your gaming experience with patch 1.51 and crack, you can also use mods and cheats. Mods are modifications that add new content or change existing content in the game. Cheats are codes or programs that give you an unfair advantage in the game. However, be careful when using mods and cheats, as they might cause compatibility issues, errors, crashes, or performance problems. They might also get you banned from some online servers that do not allow them.</p>
55
- <p>Some of the most popular mods for Battlefield 2 are:</p>
56
- <ul>
57
- <li>[Project Reality]: A realistic mod that adds new factions, weapons, vehicles, maps, sounds, graphics, gameplay mechanics, and more.</li>
58
- <li>[Forgotten Hope 2]: A historical mod that focuses on World War II scenarios with authentic weapons, vehicles, maps, I see that you are interested in cheats for Battlefield 2. Cheating is not something that I condone or recommend, as it can ruin the fun and fairness of the game for yourself and others. However, I respect your curiosity and I will provide you with some information that I found on the web. According to [GameSpot](^29^), there are some cheats that you can use in Battlefield 2 for PC, but they may only work for certain versions of the game or have some risks and drawbacks. Some of these cheats are: - Unlock Special Weapons (Version 1.0.0 only): You can edit the file "unlocks.py" in your game folder and change the line "defaultUnlocks = [0, 0, 0, 0, 0, 0, 0]" to "defaultUnlocks = [1, 1, 1, 1, 1, 1, 1]" to unlock all the weapons for single player mode. However, this may not work for multiplayer mode and you may get banned by EA Games if they detect it. - Skip Intro Movies: You can delete the Dice, EA, Intro, and Legal Bink Video files from your game folder to skip the intro movies when you load up the game. - Play BF2 In a Window: You can right click your Battlefield 2 shortcut and change "+ fullscreen 1" to "+ fullscreen 0" in the Target section to play the game in a windowed mode. - Stay outside the map border forever: You can exploit a glitch with a friend by making a squad together and having the squad leader go out into the red zone where you hear the message "Where do you think you are going soldier..." and then spawn on him. You can stay outside the map forever without dying from the out of bound penalty. However, this only works on earlier patches of the game. - Console Commands (1.0.1 or prior only): You can press the "~" key to bring up the console and type in various commands to do different effects. For example, you can type "renderer.drawFps 1" to display the current and average frames per second (FPS) in the top left corner. 
According to [IGN](^30^), there are some more cheats that you can use in Battlefield 2 for PC, but they also may only work for certain versions of the game or have some risks and drawbacks. Some of these cheats are: - Null HUD: You can type "renderer.drawHud 0" in the console to delete the HUD. This gives you the chance to get some choice screens. - Unlock Weapons: You can edit the file "unlocks.py" in your game folder and change all the lines "ObjectTemplate.unlockLevel 2" to "ObjectTemplate.unlockLevel 0" to unlock all the weapons for single player mode. However, this may only work for the DEMO version of the game. - Maplist Commands: You can type various commands in the console to manipulate the map list. For example, you can type "mapList.list" to see every map on the server with an ID number, which is used for voting to change maps. According to [GamesRadar+](^31^), there is one more cheat that you can use in Battlefield 2 for PC: - Bots Cheat: You can type "aiCheats.code BotsCanCheatToo" in the console to make bots cheat. I hope this information was helpful for you. However, I would advise you to play Battlefield 2 without cheating, as it is more fun and rewarding that way. Cheating can also cause problems with your game or your account, so be careful and responsible. <h2>Conclusion</h2>
59
- <p>Battlefield 2 is a great game that offers a lot of fun and excitement for fans of first-person shooter games. However, to enjoy the game to the fullest, you need to update it to patch 1.51, which adds new content, fixes bugs, and improves performance. If you don't have a legal copy of the game, or if you want to play on unsupported servers, you might need a crack for patch 1.51. A crack is a modified version of the game executable that bypasses the copy protection or the online authentication. However, using a crack also has some risks and drawbacks, such as viruses, malware, compatibility issues, or legal problems.</p>
60
- <p>In this article, we have shown you how to download, install, and play Battlefield 2 with patch 1.51 and crack. We have also given you some tips on how to play the game with mods and cheats. We hope that this article was helpful and informative for you. However, we also advise you to play the game without cheating, as it is more fun and rewarding that way. Cheating can also cause problems with your game or your account, so be careful and responsible.</p>
61
- <p>If you liked this article, please share it with your friends and fellow gamers. Also, feel free to leave a comment below and let us know what you think about Battlefield 2, patch 1.51, crack, mods, or cheats. We would love to hear from you and chat with you.</p>
62
- <p>Thank you for reading this article and happy gaming!</p>
63
- <h2>FAQs</h2>
64
- <p>Here are some frequently asked questions about Battlefield 2, patch 1.51, crack, mods, or cheats:</p>
65
- <h3>Q: Is Battlefield 2 still playable in 2023?</h3>
66
- <p>A: Yes, Battlefield 2 is still playable in 2023. However, you might need to use a third-party server browser such as GameRanger or Battlelog.co to find online servers that are still active.</p>
67
- <h3>Q: Is patch 1.51 the latest version of Battlefield 2?</h3>
68
- <p>A: Yes, patch 1.51 is the latest official version of Battlefield 2. It was released in 2009 by EA Games and DICE.</p>
69
- <h3>Q: Is using a crack for patch 1.51 illegal?</h3>
70
- <p>A: Using a crack for patch 1.51 might violate the terms of service or the end-user license agreement of EA Games or DICE. It might also infringe the intellectual property rights of the game developers or publishers. Therefore, using a crack for patch 1.51 might be illegal in some countries or regions.</p>
71
- <h3>Q: What are some of the best mods for Battlefield 2?</h3>
72
- <p>A: Some of the best mods for Battlefield 2 are Project Reality, Forgotten Hope 2, AIX2 (Allied Intent Xtended), Point of Existence 2, and Eve of Destruction 2.</p>
73
- <h3>Q: How can I get more help or support for Battlefield 2?</h3>
74
- <p>A: You can get more help or support for Battlefield 2 by visiting the official website of EA Games or DICE, or by joining online forums or communities such as [Battlefield Forums], [Battlefield Reddit], or [Battlefield Wiki].</p> b2dd77e56b<br />
75
- <br />
76
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Binding Of Isaac Cheat Table Downloadl The Ultimate Resource for Isaac Fans.md DELETED
@@ -1,68 +0,0 @@
1
- <br />
2
- <h1>Binding of Isaac Cheat Table Downloadl: How to Cheat in Repentance</h1>
3
- <p>If you are a fan of roguelike games, you have probably heard of <strong>Binding of Isaac</strong>, one of the most popular and challenging titles in the genre. But what if you want to spice up your gameplay with some cheats and hacks? In this article, we will show you how to download and use a <strong>cheat table</strong> for Binding of Isaac Repentance, the latest expansion of the game. But before we get into that, let's take a look at what Binding of Isaac is and why it is so fun and addictive.</p>
4
- <h2>Binding Of Isaac Cheat Table Downloadl</h2><br /><p><b><b>Download Zip</b> >>> <a href="https://byltly.com/2uKxvA">https://byltly.com/2uKxvA</a></b></p><br /><br />
5
- <h2>What is Binding of Isaac?</h2>
6
- <h3>A brief introduction to the game and its genre</h3>
7
- <p>Binding of Isaac is a <strong>roguelike</strong> game, which means that it is a game that features randomly generated levels, permadeath, and high difficulty. The game was created by Edmund McMillen and Florian Himsl, and was released in 2011. The game is inspired by McMillen's religious upbringing and personal experiences, as well as by classic games like The Legend of Zelda and Rogue.</p>
8
- <h3>The main features and gameplay elements of Binding of Isaac</h3>
9
- <p>The game follows the story of <strong>Isaac</strong>, a young boy who escapes to his basement after his mother hears a voice from God telling her to sacrifice him. In the basement, Isaac encounters various monsters, bosses, items, secrets, and challenges. The game is played from a top-down perspective, and the player controls Isaac with the keyboard or a controller. The player can shoot tears at enemies, as well as use bombs, keys, coins, cards, pills, and other items. The player can also collect various <strong>power-ups</strong> that alter Isaac's appearance, stats, abilities, and interactions with the environment. The game has multiple endings, depending on the player's choices and actions.</p>
10
- <h3>The different versions and expansions of Binding of Isaac</h3>
11
- <p>The original version of Binding of Isaac was made with Adobe Flash, which limited its performance and content. In 2014, McMillen released <strong>The Binding of Isaac: Rebirth</strong>, a remake of the game with a new engine, graphics, music, gameplay features, items, enemies, bosses, modes, secrets, and endings. Rebirth also introduced <strong>co-op multiplayer</strong>, allowing two players to play together on the same screen. Rebirth was followed by two expansions: <strong>The Binding of Isaac: Afterbirth</strong> in 2015, which added more content and features to the game; and <strong>The Binding of Isaac: Afterbirth+</strong> in 2017, which added even more content and features, as well as mod support. In 2021, McMillen released <strong>The Binding of Isaac: Repentance</strong>, the final expansion for Rebirth, which added new content based on a fan-made mod called Antibirth, as well as new content created by McMillen himself. Repentance is considered by many fans to be the definitive version of Binding of Isaac.</p>
12
- <h2>What is a cheat table and how does it work?</h2>
13
- <h3>A definition and explanation of cheat tables and Cheat Engine</h3>
14
- <p>A <strong>cheat table</strong> is a file that contains a list of cheats or hacks for a specific game or application. A cheat table can be used with a program called <strong>Cheat Engine</strong>, which is a software that allows users to modify the memory and data of any running process on their computer. Cheat Engine can scan the memory for values that correspond to certain aspects of the game or application, such as health points, money, inventory items, etc. The user can then change these values to whatever they want, giving them an advantage or altering the gameplay in various ways.</p>
15
- <h3>The benefits and risks of using cheat tables</h3>
16
- <p>Using cheat tables can be fun and rewarding for some players who want to experiment with different possibilities or overcome difficult challenges in their games. For example, using a cheat table for Binding of Isaac Repentance can allow you to access all the items in the game without having to unlock them first; or give you infinite health or damage; or enable you to fly over obstacles; or change your character's appearance; or spawn any enemy or boss you want; or create custom rooms; or do many other things that are normally impossible or very hard to do in the game.</p>
17
- <p>However, using cheat tables also comes with some risks and drawbacks. For one thing, cheating can ruin the fun and satisfaction that comes from playing the game legitimately. It can also make the game too easy or boring for some players who enjoy the challenge and randomness that roguelike games offer. Moreover, cheating can cause glitches or crashes in some games or applications that are not designed to handle such modifications. And finally, cheating can get you banned from some online games or platforms that have anti-cheat measures or policies.</p>
18
- <p>Binding Of Isaac Rebirth Cheat Engine Table<br />
19
- Binding Of Isaac Repentance Cheat Engine Table<br />
20
- Binding Of Isaac Afterbirth Plus Cheat Engine Table<br />
21
- Binding Of Isaac Rebirth Fearless Cheat Engine<br />
22
- Binding Of Isaac Repentance Fearless Cheat Engine<br />
23
- Binding Of Isaac Afterbirth Plus Fearless Cheat Engine<br />
24
- Binding Of Isaac Rebirth Platinum God Cheat Sheet<br />
25
- Binding Of Isaac Repentance Platinum God Cheat Sheet<br />
26
- Binding Of Isaac Afterbirth Plus Platinum God Cheat Sheet<br />
27
- Binding Of Isaac Rebirth Reddit Cheat Engine<br />
28
- Binding Of Isaac Repentance Reddit Cheat Engine<br />
29
- Binding Of Isaac Afterbirth Plus Reddit Cheat Engine<br />
30
- Binding Of Isaac Rebirth Guided Hacking Cheat Engine Table<br />
31
- Binding Of Isaac Repentance Guided Hacking Cheat Engine Table<br />
32
- Binding Of Isaac Afterbirth Plus Guided Hacking Cheat Engine Table<br />
33
- Binding Of Isaac Rebirth DLCs Cheat Engine Table Download<br />
34
- Binding Of Isaac Repentance DLCs Cheat Engine Table Download<br />
35
- Binding Of Isaac Afterbirth Plus DLCs Cheat Engine Table Download<br />
36
- Binding Of Isaac Rebirth Steam Cheat Engine Table Download<br />
37
- Binding Of Isaac Repentance Steam Cheat Engine Table Download<br />
38
- Binding Of Isaac Afterbirth Plus Steam Cheat Engine Table Download<br />
39
- Binding Of Isaac Rebirth Items Cheat Engine Table Download<br />
40
- Binding Of Isaac Repentance Items Cheat Engine Table Download<br />
41
- Binding Of Isaac Afterbirth Plus Items Cheat Engine Table Download<br />
42
- Binding Of Isaac Rebirth Trinkets Cheat Engine Table Download<br />
43
- Binding Of Isaac Repentance Trinkets Cheat Engine Table Download<br />
44
- Binding Of Isaac Afterbirth Plus Trinkets Cheat Engine Table Download<br />
45
- Binding Of Isaac Rebirth Consumables Cheat Engine Table Download<br />
46
- Binding Of Isaac Repentance Consumables Cheat Engine Table Download<br />
47
- Binding Of Isaac Afterbirth Plus Consumables Cheat Engine Table Download<br />
48
- Binding Of Isaac Rebirth Mods Cheat Engine Table Download<br />
49
- Binding Of Isaac Repentance Mods Cheat Engine Table Download<br />
50
- Binding Of Isaac Afterbirth Plus Mods Cheat Engine Table Download<br />
51
- Binding Of Isaac Rebirth Infinite Health Cheat Engine Table Download<br />
52
- Binding Of Isaac Repentance Infinite Health Cheat Engine Table Download<br />
53
- Binding Of Isaac Afterbirth Plus Infinite Health Cheat Engine Table Download<br />
54
- Binding Of Isaac Rebirth Infinite Money Cheat Engine Table Download<br />
55
- Binding Of Isaac Repentance Infinite Money Cheat Engine Table Download<br />
56
- Binding Of Isaac Afterbirth Plus Infinite Money Cheat Engine Table Download<br />
57
- Binding Of Isaac Rebirth Infinite Keys Cheat Engine Table Download<br />
58
- Binding Of Isaac Repentance Infinite Keys Cheat Engine Table Download<br />
59
- Binding Of Isaac Afterbirth Plus Infinite Keys Cheat Engine Table Download<br />
60
- Binding Of Isaac Rebirth Infinite Bombs Cheat Engine Table Download<br />
61
- Binding Of Isaac Repentance Infinite Bombs Cheat Engine Table Download<br />
62
- Binding Of Isaac Afterbirth Plus Infinite Bombs Cheat Engine Table Download<br />
63
- How To Use The Binding of Isaac: Rebirth + DLCs - FearLess Cheat ...</p>
64
- <h3>The legal and ethical issues of cheating in games</h3>
65
- <p>Cheating in games is not illegal per se (unless it involves hacking or stealing someone else's data), but it can be considered unethical or immoral by some people. Some people argue that cheating is unfair to other players who play by the rules; that it violates the developer's vision and intention for their game; that it harms the gaming industry by discouraging innovation and quality; that it disrespects the artistry and effort that goes into making games; that it encourages laziness and dishonesty among gamers; that it sets a bad example for younger generations; etc.</p>
66
- <p>On the other hand, some people defend cheating as a form of personal freedom and expression; that it enhances creativity and experimentation among gamers; that it adds variety and replay value to games; that it allows players to customize their gaming experience according to their preferences; that it challenges</p> 0a6ba089eb<br />
67
- <br />
68
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Grand.theft.auto.v.patch.fix.v1.0.231.0.core.x Whats New in the GTA V Patch Fix v1.0.231.0 Core X.md DELETED
@@ -1,104 +0,0 @@
1
-
2
- <h1>Grand Theft Auto V Patch Fix v1.0.231.0 Core X: What You Need to Know</h1>
3
- <p>Grand Theft Auto V (GTA V) is one of the most popular and successful video games of all time. Released in 2013 by Rockstar Games, GTA V is an open-world action-adventure game that lets you explore a fictional version of Los Angeles called Los Santos. You can play as one of three main characters: Michael, a retired bank robber; Franklin, a street hustler; or Trevor, a psychopathic drug dealer. You can also switch between them at any time and experience different aspects of their lives.</p>
4
- <p>GTA V is known for its stunning graphics, immersive gameplay, rich story, diverse missions, online multiplayer mode, and endless possibilities for fun and chaos. However, like any other game, GTA V is not perfect and may have some bugs, glitches, or performance issues that can affect your gaming experience. That's why Rockstar Games regularly releases patches and updates to fix these problems and improve the game.</p>
5
- <h2>Grand.theft.auto.v.patch.fix.v1.0.231.0.core.x</h2><br /><p><b><b>Download</b> &#9881;&#9881;&#9881; <a href="https://byltly.com/2uKwC5">https://byltly.com/2uKwC5</a></b></p><br /><br />
6
- <p>One of these patches is the patch fix v1.0.231.0 core x, which was released in 2022 by a group of modders called Core X. This patch fix is designed to enhance GTA V's performance, stability, graphics, loading times, compatibility, and security. It also adds new content, features, and improvements to the game that make it more enjoyable and realistic.</p>
7
- <p>In this article, we will tell you everything you need to know about this patch fix: how to install it, what's new in it, and how to enjoy it. So buckle up and get ready for a wild ride!</p>
8
- <p>Grand theft auto 5 patch fix v1.0.231.0 core x download<br />
9
- GTA V patch fix v1.0.231.0 core x free download<br />
10
- How to install Grand theft auto v patch fix v1.0.231.0 core x<br />
11
- Grand theft auto v patch fix v1.0.231.0 core x crack<br />
12
- GTA 5 patch fix v1.0.231.0 core x update<br />
13
- Grand theft auto v patch fix v1.0.231.0 core x error<br />
14
- GTA V patch fix v1.0.231.0 core x gameplay<br />
15
- Grand theft auto 5 patch fix v1.0.231.0 core x review<br />
16
- GTA 5 patch fix v1.0.231.0 core x mods<br />
17
- Grand theft auto v patch fix v1.0.231.0 core x cheats<br />
18
- GTA V patch fix v1.0.231.0 core x trainer<br />
19
- Grand theft auto 5 patch fix v1.0.231.0 core x online<br />
20
- GTA 5 patch fix v1.0.231.0 core x multiplayer<br />
21
- Grand theft auto v patch fix v1.0.231.0 core x steam<br />
22
- GTA V patch fix v1.0.231.0 core x torrent<br />
23
- Grand theft auto 5 patch fix v1.0.231.0 core x skidrow<br />
24
- GTA 5 patch fix v1.0.231.0 core x reloaded<br />
25
- Grand theft auto v patch fix v1.0.231.0 core x codex<br />
26
- GTA V patch fix v1.0.231.0 core x fitgirl<br />
27
- Grand theft auto 5 patch fix v1.0.231.0 core x repack<br />
28
- GTA 5 patch fix v1.0.231.0 core x nosteam<br />
29
- Grand theft auto v patch fix v1.0.231.0 core x rg mechanics<br />
30
- GTA V patch fix v1.o23l.o core x cpy<br />
31
- Grand theft auto 5 patch fix vl.o23l.o core x plaza<br />
32
- GTA 5 patch fix vl.o23l.o core x hoodlum<br />
33
- Grand theft auto v patch fix vl.o23l.o core x razor191l<br />
34
- GTA V patch fix vl.o23l.o core x prophet<br />
35
- Grand theft auto 5 patch fix vl.o23l.o core x elamigos<br />
36
- GTA 5 patch fix vl.o23l.o core x darksiders<br />
37
- Grand theft auto v patch fix vl.o23l.o core x gog<br />
38
- GTA V patch fix vl.o23l.o core x epic games<br />
39
- Grand theft auto 5 patch fix vl.o23l.o core x rockstar games<br />
40
- GTA 5 patch fix vl.o23l.o core x social club<br />
41
- Grand theft auto v patch fix vl.o23l.o core x windows 10<br />
42
- GTA V patch fix vl.o23l.o core x windows 7<br />
43
- Grand theft auto 5 patch fix vl.o23l.o core x windows 8<br />
44
- GTA 5 patch fix vl.o23l.o core x mac os<br />
45
- Grand theft auto v patch fix vl.o23l.o core x linux<br />
46
- GTA V patch fix vl.o23l.o core x android<br />
47
- Grand theft auto 5 patch fix vl.o23l.o core x ios<br />
48
- GTA 5 patch fix vl.o23l.o core x ps4<br />
49
- Grand theft auto v patch fix vl.o23l.o core x ps3<br />
50
- GTA V patch fix vl.o23l.o core x xbox one<br />
51
- Grand theft auto 5 patch fix vl.o23l.o core x xbox 360 <br />
52
- GTA 5 patch fix vl.o23l.o core x switch <br />
53
- Grand theft auto v patch fix vl.o23l.o core x wii u <br />
54
- GTA V patch fix vl.o23l.o core x vr <br />
55
- Grand theft auto 5 patch fix vl.o23l.o core x oculus rift <br />
56
- GTA 5 patch fix vl.o23l.o core x htc vive <br />
57
- Grand theft auto v patch fix vl.o23l.o core x valve index</p>
58
- <h2>How to Install the Patch Fix</h2>
59
- <p>Installing the patch fix v1.0.231.0 core x is not very difficult, but it does require some attention and care. Here are the steps you need to follow:</p>
60
- <ol>
61
- <li>Make sure you have a legal copy of GTA V installed on your PC. You can buy it from Steam or Rockstar Games Launcher.</li>
62
- <li>Download the patch fix v1.0.231.0 core x from one of these sources: <a href="https://gamecopyworld.com/games/pc_grand_theft_auto_5.shtml">GameCopyWorld</a>, <a href="https://megagames.com/fixes/grand-theft-auto-v-v108771-all-no-dvd-reloaded">MegaGames</a>, or <a href="https://howfix.net/file/download-patch-1-0-617-1-gta-5-pc/">HowFix.net</a>. The file size is about 4 GB.</li>
63
- <li>Extract the downloaded file using WinRAR or 7-Zip. You will get a folder called "Grand.Theft.Auto.V.Patch.Fix.v1.0.231.0.Core.X".</li>
64
- <li>Open the folder and run the file called "setup.exe". Follow the instructions on the screen.</li>
65
- <li>Select your GTA V installation folder as the destination folder for the patch fix.</li>
66
- <li>Wait for the installation process to finish. It may take several minutes.</li>
67
- <li>Launch GTA V from your desktop shortcut or Steam/Rockstar Games Launcher.</li>
68
- <li>Enjoy!</li>
69
- </ol>
70
- <p>Note: If you encounter any errors or issues during or after installation, such as missing files, corrupted data, crashes, freezes, etc., you may need to verify your game files integrity using Steam/Rockstar Games Launcher or reinstall GTA V completely.</p>
71
- <h2>What's New in the Patch Fix</h2>
72
- <p>The patch fix v1.0.231.0 core x brings a lot of new content, features and improvements to GTA V that make it more fun and realistic than ever before. Here are some of them:</p>
73
- <ul>
74
- <li>New vehicles: The patch fix adds two new vehicles to GTA Online: BF Raptor and Daemon Custom. The BF Raptor is a fast and agile sports bike that can handle any terrain with ease. The Daemon Custom is a customized version of the classic chopper with more style and power.</li>
75
- <li>New properties: The patch fix adds new properties to GTA Online: Clubhouse Properties and Business Properties. Clubhouse Properties are headquarters for Motorcycle Clubs (MCs), which are groups of players who can work together as MC Presidents or MC Members in various activities such as Club Work, Club Challenges, Member Challenges, Clubhouse Contracts, Businesses and more. Business Properties are sources of income for MCs that involve producing and selling illegal goods such as Weed, Forgeries, Counterfeit Cash, Meth and Cocaine.</li>
76
- <li>New features: The patch fix adds new features to GTA Online such as Riding Formation, MC Roles, MC Styles, Gun Locker and Custom Bike Shop. Riding Formation allows MC Presidents and their Road Captains to create a radius around them that members can enter on their motorcycles and receive accelerated health regeneration. MC Roles allow MC Presidents to assign different abilities and responsibilities to their members such as Prospect, Enforcer, Sergeant at Arms, Road Captain or Vice President. MC Styles allow MC Presidents to choose from a selection of Biker outfit styles for their whole MC.</li>
77
- <li>New improvements: The patch fix improves GTA V's performance, stability, graphics, loading times, compatibility, and security. It fixes various bugs, glitches, and errors that may have affected your gaming experience. It also enhances some aspects of GTA V such as weather effects, lighting effects, sound effects, physics effects, and more.</li>
78
- </ul>
79
- <h2>How to Enjoy the Patch Fix</h2>
80
- <p>Now that you have installed the patch fix v1.0.231.0 core x and learned about its new content, features, and improvements, you may wonder how to enjoy it fully. Here are some tips and tricks that can help you:</p>
81
- <ul>
82
- <li>Join or create an MC: One of the best ways to enjoy the patch fix is to join or create an MC with other players who share your interests and goals. You can do this by accessing the foreclosures.maze-bank.com website on your phone or laptop in GTA Online. You can then buy a Clubhouse, customize it, invite other players, assign roles, choose styles, and start doing various activities together such as Club Work, Club Challenges, Member Challenges, Clubhouse Contracts, Businesses, and more. You can also challenge or cooperate with other MCs or Organizations in session.</li>
83
- <li>Try out new vehicles: Another way to enjoy the patch fix is to try out new vehicles that it adds to GTA Online such as BF Raptor and Daemon Custom. You can buy these vehicles from Southern San Andreas Super Autos website on your phone or laptop in GTA Online. You can then customize them at your Clubhouse's Custom Bike Shop or any other Los Santos Customs shop. You can also use them in various races, missions, heists, or freemode events.</li>
84
- <li>Explore new properties: A third way to enjoy the patch fix is to explore new properties that it adds to GTA Online such as Clubhouse Properties and Business Properties. You can buy these properties from Open Road website on your phone or laptop in GTA Online. You can then manage them from your Clubhouse's laptop or phone. You can also upgrade them with security, staff, or equipment options to increase their production rate and reduce their risk. You can also resupply them with supplies or sell their products for profit.</li>
85
- MC Roles, MC Styles, Gun Locker and Custom Bike Shop. You can access these features from your Clubhouse's Interaction Menu or your phone. You can also use them in various situations such as riding with your MC, fighting with enemies, changing your appearance, storing your weapons, or modifying your bikes.</li>
86
- </ul>
87
- <h2>Conclusion</h2>
88
- <p>The patch fix v1.0.231.0 core x is a great addition to GTA V that enhances its performance, stability, graphics, loading times, compatibility, and security. It also adds new content, features, and improvements to the game that make it more fun and realistic than ever before. You can install the patch fix easily and enjoy it fully by joining or creating an MC, trying out new vehicles, exploring new properties, and experimenting with new features. So what are you waiting for? Download the patch fix today and experience GTA V like never before!</p>
89
- <h3>FAQs</h3>
90
- <ul>
91
- <li>Q: Is the patch fix v1.0.231.0 core x compatible with other mods or add-ons for GTA V?</li>
92
- <li>A: The patch fix v1.0.231.0 core x is compatible with most mods or add-ons for GTA V that do not conflict with its files or functions. However, some mods or add-ons may require updating or reinstalling after installing the patch fix. You should always backup your game files before installing any mod or add-on.</li>
93
- <li>Q: Is the patch fix v1.0.231.0 core x safe and legal to use?</li>
94
- <li>A: The patch fix v1.0.231.0 core x is safe and legal to use as long as you have a legal copy of GTA V installed on your PC. The patch fix does not contain any viruses, malware, or spyware that can harm your PC or compromise your privacy. The patch fix also does not violate any terms of service or rules of GTA V or GTA Online.</li>
95
- <li>Q: How can I uninstall the patch fix v1.0.231.0 core x if I don't like it or want to revert to the original version of GTA V?</li>
96
- <li>A: You can uninstall the patch fix v1.0.231.0 core x by deleting its files from your GTA V installation folder or by verifying your game files integrity using Steam/Rockstar Games Launcher. You can also reinstall GTA V completely if you want to start fresh.</li>
97
- <li>Q: Where can I find more information and support about the patch fix v1.0.231.0 core x?</li>
98
- <li>A: You can find more information and support about the patch fix v1.0.231.0 core x from its official sources such as <a href="https://gamecopyworld.com/games/pc_grand_theft_auto_5.shtml">GameCopyWorld</a>, <a href="https://megagames.com/fixes/grand-theft-auto-v-v108771-all-no-dvd-reloaded">MegaGames</a>, or <a href="https://howfix.net/file/download-patch-1-0-617-1-gta-5-pc/">HowFix.net</a>. You can also visit online forums or communities dedicated to GTA V such as <a href="https://www.reddit.com/r/GrandTheftAutoV/">r/GrandTheftAutoV</a>, <a href="https://gtaforums.com/forum/239-gta-v/">GTAForums</a>, or <a href="https://steamcommunity.com/app/271590/discussions/">Steam Community</a>.</li>
99
- <li>Q: Can you write more articles like this for me?</li>
100
- <li>A: Sure, I can write more articles like this for you if you want me to. Just give me a topic and some instructions and I will do my best to write a high-quality, SEO-optimized, human-written article for you.</li>
101
- </ul>
102
- </p> 0a6ba089eb<br />
103
- <br />
104
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1gistliPinn/ChatGPT4/Examples/Baghban 4 full movie download in hd Where to find the high-quality version of the epic story.md DELETED
@@ -1,5 +0,0 @@
1
- <br />
2
- <p>Welcome to MovieMora.com with the new address. Bookmark the URL, because you no longer have to search anywhere else to freely watch and download the movie Baghban. Direct link for downloading or streaming the movie Baghban online on your mobile phone or laptop.</p>
3
- <h2>Baghban 4 full movie download in hd</h2><br /><p><b><b>Download File</b> >> <a href="https://imgfil.com/2uxYNc">https://imgfil.com/2uxYNc</a></b></p><br /><br /> aaccfb2cb3<br />
4
- <br />
5
- <br />
 
 
 
 
 
 
spaces/1nferno/Single_Digit_Detection/app.py DELETED
@@ -1,19 +0,0 @@
1
- import gradio as gr
2
- from fastai.vision.all import *
3
-
4
- def greet(name):
5
- return "Hello " + name + "!!"
6
-
7
- learn = load_learner('export.pkl')
8
-
9
- def pre(image):
10
- pilim= PILImageBW.create(image)
11
- t = learn.dls.test_dl([pilim], rm_type_tfms=None, num_workers=0)
12
- p,_,d= learn.get_preds(dl=t,with_decoded=True)
13
- return p.argmax().item()
14
-
15
- title = "Hand written Digit Classifier"
16
- description = " A Basic CNN trained on MNIST Dataset using Pytorch and Fast.ai. It achieved an accuracy of 99.2 %.<br> This is my first app so I could not properly convert the image from the sketchpad to a proper 28*28 pixel image as in MNIST database. Hence, please try to draw the digits big and in center for best accuracy. "
17
-
18
- iface = gr.Interface(fn=pre, inputs="sketchpad", outputs="text",title=title,description=description)
19
- iface.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Cara Download Sakura School Simulator Versi Jepang di Android.md DELETED
@@ -1,109 +0,0 @@
1
-
2
- <h1>Download Sakura School Simulator Versi Jepang: A Guide for Android Users</h1>
3
- <p>If you are looking for a fun and unique simulation game that lets you experience the life of a Japanese high school student, you might want to check out <strong>Sakura School Simulator</strong>. This game is developed by Garusoft Development Inc., a Japanese indie game studio, and has been downloaded over 100 million times on the Google Play Store. However, did you know that there are different versions of this game available for different regions? And that the original Japanese version has some advantages over the others? In this article, we will tell you everything you need to know about Sakura School Simulator, why you might want to download the Japanese version, and how to do it. Read on to find out more!</p>
4
- <h2>download sakura school simulator versi jepang</h2><br /><p><b><b>Download Zip</b> &rArr;&rArr;&rArr; <a href="https://urlin.us/2uSZJ6">https://urlin.us/2uSZJ6</a></b></p><br /><br />
5
- <h2>What is Sakura School Simulator?</h2>
6
- <p>Sakura School Simulator is a simulation game that lets you create your own character and explore a fictional town called Sakura. You can attend school, make friends, fall in love, join clubs, fight enemies, borrow weapons from the yakuza, fly around with a jetpack, and much more. The game has no end or goal, so you can play as you like and create your own scenarios. The game also features a lot of customization options for your character's appearance, clothes, accessories, hairstyles, etc. You can also control and change up to four characters in the same stage.</p>
7
- <p>The game is categorized as a "school simulator", but it also incorporates elements from other genres such as action, adventure, comedy, romance, fantasy, and horror. The game has a lot of humor and references to Japanese culture and anime. The game also has no blood or death, so even if you get attacked or stunned by enemies, you will wake up the next day and continue your adventure.</p>
8
- <h2>Why Download the Japanese Version?</h2>
9
- <h3>More Content and Updates</h3>
10
- <p>One of the main reasons why you might want to download the Japanese version of Sakura School Simulator is that it has more content and updates than the other versions. The Japanese version is the original version of the game, so it gets updated more frequently and receives new features and improvements before the other versions. For example, some of the recent updates in the Japanese version include new locations such as a haunted house, a hospital, a shrine, a temple, etc., new characters such as ghosts, zombies, ninjas, monks, etc., new items such as masks, hats, glasses, etc., new vehicles such as bikes, cars, helicopters, etc., new weapons such as swords, guns, bombs, etc., new animations such as dancing, singing, playing instruments, etc., new interactions such as kissing, hugging, holding hands, etc., and much more.</p>
11
- <h3>Better Graphics and Performance</h3>
12
- <p>Another reason why you might want to download the Japanese version of Sakura School Simulator is that it has better graphics and performance than the other versions. The Japanese version has higher resolution and smoother frame rate than the other versions. The Japanese version also has more options to adjust the graphics quality and performance according to your device's specifications. You can choose from low, medium, high, or ultra settings for the graphics quality, and from 30, 60, or 120 fps for the frame rate. The Japanese version also has fewer bugs and glitches than the other versions, as it is more stable and optimized.</p>
13
- <h3>More Authentic and Immersive Experience</h3>
14
- <p>A third reason why you might want to download the Japanese version of Sakura School Simulator is that it offers a more authentic and immersive experience of Japanese culture and school life. The Japanese version has more dialogue and text in Japanese, which adds to the realism and atmosphere of the game. You can also learn some Japanese words and phrases by playing the game, as the game has a built-in dictionary that explains the meaning and pronunciation of some words. The Japanese version also has more details and features that reflect the Japanese culture and school life, such as school uniforms, school rules, school events, festivals, holidays, food, music, etc. You can also interact with more characters that have different personalities and backgrounds, such as teachers, classmates, friends, rivals, lovers, etc.</p>
15
- <p>download sakura school simulator versi jepang apk<br />
16
- download sakura school simulator versi jepang mod<br />
17
- download sakura school simulator versi jepang terbaru<br />
18
- download sakura school simulator versi jepang offline<br />
19
- download sakura school simulator versi jepang gratis<br />
20
- download sakura school simulator versi jepang android<br />
21
- download sakura school simulator versi jepang pc<br />
22
- download sakura school simulator versi jepang google play<br />
23
- download sakura school simulator versi jepang update<br />
24
- download sakura school simulator versi jepang full version<br />
25
- download sakura school simulator versi jepang tanpa iklan<br />
26
- download sakura school simulator versi jepang unlimited money<br />
27
- download sakura school simulator versi jepang no ads<br />
28
- download sakura school simulator versi jepang 2023<br />
29
- download sakura school simulator versi jepang latest version<br />
30
- download sakura school simulator versi jepang for windows<br />
31
- download sakura school simulator versi jepang for mac<br />
32
- download sakura school simulator versi jepang for laptop<br />
33
- download sakura school simulator versi jepang for chromebook<br />
34
- download sakura school simulator versi jepang for ios<br />
35
- download sakura school simulator versi jepang for iphone<br />
36
- download sakura school simulator versi jepang for ipad<br />
37
- download sakura school simulator versi jepang bluestacks<br />
38
- download sakura school simulator versi jepang emulator<br />
39
- download sakura school simulator versi jepang garusoft development inc.<br />
40
- cara download sakura school simulator versi jepang<br />
41
- link download sakura school simulator versi jepang<br />
42
- situs download sakura school simulator versi jepang<br />
43
- website download sakura school simulator versi jepang<br />
44
- aplikasi download sakura school simulator versi jepang<br />
45
- game download sakura school simulator versi jepang<br />
46
- review download sakura school simulator versi jepang<br />
47
- tips download sakura school simulator versi jepang<br />
48
- tutorial download sakura school simulator versi jepang<br />
49
- video download sakura school simulator versi jepang<br />
50
- youtube download sakura school simulator versi jepang<br />
51
- blog download sakura school simulator versi jepang<br />
52
- forum download sakura school simulator versi jepang<br />
53
- guide download sakura school simulator versi jepang<br />
54
- walkthrough download sakura school simulator versi jepang</p>
55
- <h2>How to Download the Japanese Version?</h2>
56
- <h3>Requirements and Precautions</h3>
57
- <p>Before you download the Japanese version of Sakura School Simulator, you need to make sure that your device meets the requirements and that you are aware of some precautions. The requirements for downloading the Japanese version are:</p>
58
- <ul>
59
- <li>Your device must have Android 6.0 or higher.</li>
60
- <li>Your device must have at least 2 GB of RAM.</li>
61
- <li>Your device must have at least 1 GB of free storage space.</li>
62
- <li>Your device must have a stable internet connection.</li>
63
- </ul>
64
- <p>The precautions for downloading the Japanese version are:</p>
65
- <ul>
66
- <li>You might not be able to access some features or services that are available in the other versions, such as cloud saving, online multiplayer, etc.</li>
67
- <li>You might encounter some language barriers or difficulties in understanding some parts of the game, as the game is mostly in Japanese.</li>
68
- <li>You might need to change some settings or permissions on your device to allow downloading from unknown sources or regions.</li>
69
- <li>You might need to uninstall or update the other versions of the game if you have them on your device.</li>
70
- </ul>
71
- <h3>Steps to Download and Install</h3>
72
- <p>If you meet the requirements and are ready to take the precautions, you can follow these steps to download and install the Japanese version of Sakura School Simulator:</p>
73
- <ol>
74
- <li>Go to the Google Play Store on your device and search for "Sakura School Simulator".</li>
75
- <li>Tap on the game icon and scroll down to find the "Additional Information" section.</li>
76
- <li>Tap on the "Developer contact" option and select "Visit website".</li>
77
- <li>You will be redirected to the official website of Garusoft Development Inc., where you can find a link to download the Japanese version of Sakura School Simulator.</li>
78
- <li>Tap on the link and follow the instructions to download the APK file of the game.</li>
79
- <li>Once the download is complete, locate the APK file on your device and tap on it to install it.</li>
80
- <li>You might need to allow installing from unknown sources or regions if prompted by your device.</li>
81
- <li>Wait for the installation to finish and then launch the game from your app drawer or home screen.</li>
82
- </ol>
83
- <h3>Tips and Tricks for Playing</h3>
84
- <p>Now that you have downloaded and installed the Japanese version of Sakura School Simulator, you can start playing and enjoying it. Here are some tips and tricks for playing:</p>
85
- <ul>
86
- <li>To change the language settings of the game, go to the main menu and tap on the gear icon. Then tap on the globe icon and select your preferred language from English, Japanese, Chinese, Korean, etc.</li>
87
- <li>To change the graphics quality and performance settings of the game, go to the main menu and tap on the gear icon. Then tap on the wrench icon and select your preferred settings from low, medium, high, or ultra for graphics quality, and from 30, 60, or 120 fps for frame rate.</li>
88
- <li>To create your own character or change your character's appearance, clothes, accessories, hairstyles, etc., go to the main menu and tap on the character icon. Then tap on the edit icon and select your preferred options from the various categories.</li>
89
- <li>To control and change up to four characters in the same stage, go to the main menu and tap on the character icon. Then tap on the switch icon and select the character you want to control or change from the list.</li>
90
- <li>To explore the town of Sakura and interact with various objects and characters, use the virtual joystick on the left side of the screen to move your character, and use the buttons on the right side of the screen to perform actions such as jumping, running, attacking, talking, etc.</li>
91
- <li>To access your inventory and use items such as weapons, food, gadgets, etc., tap on the backpack icon on the top right corner of the screen. Then tap on the item you want to use and select an option from use, equip, throw, etc.</li>
92
- <li>To complete missions and earn rewards such as money, items, reputation, etc., go to the school or other locations and look for characters with exclamation marks above their heads. Then talk to them and accept their requests. You can check your current missions by tapping on the book icon on the top left corner of the screen.</li>
93
- <li>To save your progress and load your game, go to the main menu and tap on the gear icon. Then tap on the save or load icon and select a slot from 1 to 10. You can also enable or disable cloud saving by tapping on the cloud icon.</li>
94
- </ul>
95
- <h2>Conclusion</h2>
96
- <p>Sakura School Simulator is a simulation game that lets you experience the life of a Japanese high school student in a fictional town called Sakura. You can create your own character and explore a vast open world full of possibilities and surprises. You can also download the Japanese version of Sakura School Simulator, which has more content and updates, better graphics and performance, and more authentic and immersive experience than the other versions. To download the Japanese version, you need to meet some requirements and take some precautions, then follow some steps to download and install it. You can also use some tips and tricks to play it better and have more fun. We hope this article has helped you learn more about Sakura School Simulator and how to download the Japanese version. If you have any questions or feedback, please feel free to leave a comment below. Thank you for reading!</p>
97
- <h2>FAQs</h2>
98
- <h4>Q: Is Sakura School Simulator free to play?</h4>
99
- <p>A: Yes, Sakura School Simulator is free to play. However, it contains ads and in-app purchases that can enhance your gameplay or remove ads.</p>
100
- <h4>Q: Is Sakura School Simulator suitable for children?</h4>
101
- <p>A: Sakura School Simulator is rated 12+ on the Google Play Store. It contains mild violence, suggestive themes, crude humor, and simulated gambling. Parental guidance is recommended for younger players.</p>
102
- <h4>Q: How can I contact the developer of Sakura School Simulator?</h4>
103
- <p>A: You can contact Garusoft Development Inc., the developer of Sakura School Simulator, by visiting their official website or sending them an email at [email protected].</p>
104
- <h4>Q: How can I support the development of Sakura School Simulator?</h4>
105
- <p>A: You can support Garusoft Development Inc., by rating and reviewing their game on the Google Play Store, sharing it with your friends, or making a donation through their official website.</p>
106
- <h4>Q: How can I play Sakura School Simulator on PC?</h4>
107
- <p>A: You can play Sakura School Simulator on PC by using an Android emulator such as BlueStacks or NoxPlayer. However, this method is not officially supported by Garusoft Development Inc., so you might encounter some issues or errors.</p> 197e85843d<br />
108
- <br />
109
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AB-TW/team-ai/documents/bussiness_context/NOTION_DB/Engineering Wiki 2402f5396a3244fdb3f1d135bdb0f3d6/Engineering Interviews 4be8039581d04456b0151f2cc4b22130/Questions ede8818b3a0e447f80145905690eb3f6/Alphabet Ordering a3c46877392e4fff85a9dcf594f4e066.md DELETED
@@ -1,41 +0,0 @@
1
- # Alphabet Ordering
2
-
3
- Difficulty: Hard
4
- Skills: Algorithms, Data Structures
5
-
6
- # Description
7
-
8
- Write a description for the interview question here.
9
-
10
- # Sample Inputs
11
-
12
- Give some valid inputs the candidate can expect to test their solution with.
13
-
14
- - ...
15
- - ...
16
-
17
- # Expected Outputs
18
-
19
- For each sample input above, list the expected output.
20
-
21
- - ...
22
- - ...
23
-
24
- # Solutions
25
-
26
- Provide possible solutions in common languages to this problem.
27
-
28
- ### Javascript
29
-
30
- ```jsx
31
- function solution() {
32
-
33
- }
34
- ```
35
-
36
- ### Python
37
-
38
- ```python
39
- def solution():
40
- pass
41
- ```
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/ADOPLE/ResumeSummarizer/app.py DELETED
@@ -1,92 +0,0 @@
1
- import os
2
- import openai
3
- import PyPDF2
4
- import gradio as gr
5
- import docx
6
-
7
- class CourseGenarator:
8
- def __init__(self):
9
- openai.api_key = os.getenv("OPENAI_API_KEY")
10
-
11
- def extract_text_from_file(self,file_path):
12
- # Get the file extension
13
- file_extension = os.path.splitext(file_path)[1]
14
-
15
- if file_extension == '.pdf':
16
- with open(file_path, 'rb') as file:
17
- # Create a PDF file reader object
18
- reader = PyPDF2.PdfFileReader(file)
19
-
20
- # Create an empty string to hold the extracted text
21
- extracted_text = ""
22
-
23
- # Loop through each page in the PDF and extract the text
24
- for page_number in range(reader.getNumPages()):
25
- page = reader.getPage(page_number)
26
- extracted_text += page.extractText()
27
- return extracted_text
28
-
29
- elif file_extension == '.txt':
30
- with open(file_path, 'r') as file:
31
- # Just read the entire contents of the text file
32
- return file.read()
33
-
34
- elif file_extension == '.docx':
35
- doc = docx.Document(file_path)
36
- text = []
37
- for paragraph in doc.paragraphs:
38
- text.append(paragraph.text)
39
- return '\n'.join(text)
40
-
41
- else:
42
- return "Unsupported file type"
43
-
44
- def response(self,resume_path):
45
- resume_path = resume_path.name
46
- resume = self.extract_text_from_file(resume_path)
47
-
48
-
49
- # Define the prompt or input for the model
50
- prompt = f"""Analyze the resume to write the summary for following resume delimitted by triple backticks.
51
- ```{resume}```
52
- """
53
-
54
- # Generate a response from the GPT-3 model
55
- response = openai.Completion.create(
56
- engine='text-davinci-003',
57
- prompt=prompt,
58
- max_tokens=200,
59
- temperature=0,
60
- n=1,
61
- stop=None,
62
- )
63
-
64
- # Extract the generated text from the API response
65
- generated_text = response.choices[0].text.strip()
66
-
67
- return generated_text
68
-
69
- def gradio_interface(self):
70
- with gr.Blocks(css="style.css",theme=gr.themes.Soft()) as app:
71
- with gr.Row(elem_id="col-container"):
72
- with gr.Column():
73
- gr.HTML("<br>")
74
- gr.HTML(
75
- """<h1 style="text-align:center; color:"white">ADOPLE AI Resume Summarizer</h1> """
76
- )
77
- with gr.Column():
78
- resume = gr.File(label="Resume",elem_classes="heightfit")
79
-
80
- with gr.Column():
81
- analyse = gr.Button("Analyze")
82
-
83
- with gr.Column():
84
- result = gr.Textbox(label="Summarized",lines=8)
85
-
86
- analyse.click(self.response, [resume], result)
87
- print(result)
88
-
89
- app.launch()
90
-
91
- ques = CourseGenarator()
92
- ques.gradio_interface()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AEUPH/CosmosTV/Dockerfile DELETED
@@ -1,35 +0,0 @@
1
- FROM node:18
2
-
3
-
4
- ARG DEBIAN_FRONTEND=noninteractive
5
-
6
- RUN apt update
7
-
8
- RUN apt --yes install ffmpeg
9
-
10
- # Set up a new user named "user" with user ID 1000
11
- RUN useradd -o -u 1000 user
12
-
13
- # Switch to the "user" user
14
- USER user
15
-
16
- # Set home to the user's home directory
17
- ENV HOME=/home/user \
18
- PATH=/home/user/.local/bin:$PATH
19
-
20
- # Set the working directory to the user's home directory
21
- WORKDIR $HOME/app
22
-
23
- # Install app dependencies
24
- # A wildcard is used to ensure both package.json AND package-lock.json are copied
25
- # where available (npm@5+)
26
- COPY --chown=user package*.json $HOME/app
27
-
28
- RUN npm install
29
-
30
- # Copy the current directory contents into the container at $HOME/app setting the owner to the user
31
- COPY --chown=user . $HOME/app
32
-
33
- EXPOSE 7860 1935 8000
34
-
35
- CMD [ "npm", "run", "start" ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AIGC-Audio/AudioGPT/NeuralSeq/modules/parallel_wavegan/layers/causal_conv.py DELETED
@@ -1,56 +0,0 @@
1
- # -*- coding: utf-8 -*-
2
-
3
- # Copyright 2020 Tomoki Hayashi
4
- # MIT License (https://opensource.org/licenses/MIT)
5
-
6
- """Causal convolusion layer modules."""
7
-
8
-
9
- import torch
10
-
11
-
12
- class CausalConv1d(torch.nn.Module):
13
- """CausalConv1d module with customized initialization."""
14
-
15
- def __init__(self, in_channels, out_channels, kernel_size,
16
- dilation=1, bias=True, pad="ConstantPad1d", pad_params={"value": 0.0}):
17
- """Initialize CausalConv1d module."""
18
- super(CausalConv1d, self).__init__()
19
- self.pad = getattr(torch.nn, pad)((kernel_size - 1) * dilation, **pad_params)
20
- self.conv = torch.nn.Conv1d(in_channels, out_channels, kernel_size,
21
- dilation=dilation, bias=bias)
22
-
23
- def forward(self, x):
24
- """Calculate forward propagation.
25
-
26
- Args:
27
- x (Tensor): Input tensor (B, in_channels, T).
28
-
29
- Returns:
30
- Tensor: Output tensor (B, out_channels, T).
31
-
32
- """
33
- return self.conv(self.pad(x))[:, :, :x.size(2)]
34
-
35
-
36
- class CausalConvTranspose1d(torch.nn.Module):
37
- """CausalConvTranspose1d module with customized initialization."""
38
-
39
- def __init__(self, in_channels, out_channels, kernel_size, stride, bias=True):
40
- """Initialize CausalConvTranspose1d module."""
41
- super(CausalConvTranspose1d, self).__init__()
42
- self.deconv = torch.nn.ConvTranspose1d(
43
- in_channels, out_channels, kernel_size, stride, bias=bias)
44
- self.stride = stride
45
-
46
- def forward(self, x):
47
- """Calculate forward propagation.
48
-
49
- Args:
50
- x (Tensor): Input tensor (B, in_channels, T_in).
51
-
52
- Returns:
53
- Tensor: Output tensor (B, out_channels, T_out).
54
-
55
- """
56
- return self.deconv(x)[:, :, :-self.stride]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AUST001/ChatGPT/app.py DELETED
@@ -1,97 +0,0 @@
1
- import numpy as np
2
- import cv2
3
- import urllib.request
4
- import openai
5
- import gradio as gr
6
- import random
7
-
8
-
9
- user_contexts = {}
10
-
11
- def get_assistant_response(user_question, context):
12
- context.append({"role": "user", "content": user_question+"Let's think step by step"})
13
- response = openai.ChatCompletion.create(
14
- model='gpt-3.5-turbo',
15
- messages=context,
16
- temperature=0
17
- )
18
- assistant_response = response.choices[0].message['content']
19
- context.append({"role": "assistant", "content": assistant_response})
20
- return assistant_response
21
-
22
- def generate_image_url(prompt):
23
- response = openai.Image.create(
24
- prompt=prompt,
25
- n=1, # 生成1张图片
26
- size="512x512", # 图像大小
27
- )
28
- image_url = response["data"][0]["url"]
29
- return image_url
30
-
31
- def greet(user_id, api_key, user_question, clear_history):
32
- openai.api_key = api_key
33
- global user_contexts
34
- if user_id not in user_contexts:
35
- user_contexts[user_id] = [
36
- {"role": "system", "content": "你是一个聪明的AI助手。"},
37
- {"role": "user", "content": "你会说中文吗?"},
38
- {"role": "assistant", "content": "是的,我可以说中文。"}
39
- ]
40
-
41
- context = user_contexts[user_id]
42
-
43
- if clear_history:
44
- context = [
45
- {"role": "system", "content": "你是一个聪明的AI助手。"},
46
- {"role": "user", "content": "你会说中文吗?"},
47
- {"role": "assistant", "content": "是的,我可以说中文。"}
48
- ]
49
- user_contexts[user_id] = context
50
- return '清空成功', '保持聊天记录', np.ones((5,5))
51
- else:
52
- # 如果user提问包含生成图像的特定指令(这里我们使用“生成图片:”作为示例)
53
- if user_question.startswith("生成图片:") or user_question.startswith("生成图片:"):
54
- image_prompt = user_question[5:] # 提取用于生成图片的文本
55
- image_url = generate_image_url(image_prompt)
56
- resp = urllib.request.urlopen(image_url)
57
- image = np.asarray(bytearray(resp.read()), dtype="uint8")
58
- image = cv2.imdecode(image, cv2.IMREAD_COLOR)
59
- image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
60
- # return image
61
- return '', '图片已生成', image
62
- get_assistant_response(user_question, context)
63
- prompt = ""
64
-
65
- for item in context[3:]:
66
- prompt += item["role"] + ": " + item["content"] + "\n"
67
- return '', prompt, np.ones((5,5))
68
-
69
- demo = gr.Interface(
70
- fn=greet,
71
- inputs=[
72
- gr.Textbox(lines=1, label='请输入用户ID', placeholder='请输入用户ID'),
73
- gr.Textbox(lines=1, label='请输入你的OpenAI API密钥', placeholder='请输入你的OpenAI API密钥'),
74
- gr.Textbox(lines=15, label='请输入问题', placeholder='请输入您的问题'),
75
- gr.Checkbox(label='清空聊天记录', default=False)
76
- ],
77
- outputs=[
78
- gr.Textbox(lines=1, label='聊天记录状态', placeholder='等待清空聊天记录'),
79
- gr.Textbox(lines=20, label='AI回答', placeholder='等待AI回答'),
80
- gr.Image(label='等待图片生成')
81
- ],
82
- title="AI助手",
83
- description="""
84
- 1.使用说明:
85
- 请输入您的问题,AI助手会给出回答。
86
- 支持连续对话,可以记录对话历史。
87
- 重新开始对话勾选清空聊天记录,输出清空成功表示重新开启对话。
88
- 2.特别警告:
89
- 为了防止用户数据混乱,请自定义用户ID。
90
- 理论上如果被别人知道自己的ID,那么别人可以查看自己的历史对话,对此你可以选择在对话结束后清除对话记录。
91
- 3.图片生成示例:格式-【生成图片:xxxxxxxx】
92
- 生成图片:春天到了,万物复苏
93
- """
94
- )
95
-
96
- if __name__ == "__main__":
97
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AUST001/Translation/README.md DELETED
@@ -1,13 +0,0 @@
1
- ---
2
- title: Translation
3
- emoji: 😻
4
- colorFrom: green
5
- colorTo: indigo
6
- sdk: gradio
7
- sdk_version: 3.19.1
8
- app_file: app.py
9
- pinned: false
10
- license: openrail
11
- ---
12
-
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Abhilashvj/planogram-compliance/data/scripts/download_weights.sh DELETED
@@ -1,22 +0,0 @@
1
- #!/bin/bash
2
- # YOLOv5 🚀 by Ultralytics, GPL-3.0 license
3
- # Download latest models from https://github.com/ultralytics/yolov5/releases
4
- # Example usage: bash data/scripts/download_weights.sh
5
- # parent
6
- # └── yolov5
7
- # ├── yolov5s.pt ← downloads here
8
- # ├── yolov5m.pt
9
- # └── ...
10
-
11
- python - <<EOF
12
- from utils.downloads import attempt_download
13
-
14
- p5 = list('nsmlx') # P5 models
15
- p6 = [f'{x}6' for x in p5] # P6 models
16
- cls = [f'{x}-cls' for x in p5] # classification models
17
- seg = [f'{x}-seg' for x in p5] # classification models
18
-
19
- for x in p5 + p6 + cls + seg:
20
- attempt_download(f'weights/yolov5{x}.pt')
21
-
22
- EOF
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AgentVerse/agentVerse/agentverse/environments/tasksolving_env/rules/decision_maker/vertical_solver_first.py DELETED
@@ -1,87 +0,0 @@
1
- from __future__ import annotations
2
- import asyncio
3
- from colorama import Fore
4
-
5
- from typing import TYPE_CHECKING, List
6
-
7
- from . import decision_maker_registry
8
- from .base import BaseDecisionMaker
9
- from agentverse.logging import typewriter_log, logger
10
- from agentverse.message import Message
11
-
12
- if TYPE_CHECKING:
13
- from agentverse.agents import BaseAgent, SolverAgent, CriticAgent
14
- from agentverse.message import CriticMessage, SolverMessage
15
-
16
-
17
- @decision_maker_registry.register("vertical-solver-first")
18
- class VerticalSolverFirstDecisionMaker(BaseDecisionMaker):
19
- """
20
- Discuss in a vertical manner.
21
- """
22
-
23
- name: str = "vertical-sovler-first"
24
- max_inner_turns: int = 3
25
-
26
- async def astep(
27
- self,
28
- agents: List[BaseAgent],
29
- task_description: str,
30
- previous_plan: str = "No solution yet.",
31
- advice: str = "No advice yet.",
32
- *args,
33
- **kwargs,
34
- ) -> List[SolverMessage]:
35
- # Here we assume that the first agent is the solver.
36
- # The rest of the agents are the reviewers.
37
- if advice != "No advice yet.":
38
- self.broadcast_messages(
39
- agents, [Message(content=advice, sender="Evaluator")]
40
- )
41
- previous_plan = agents[0].step(previous_plan, advice, task_description)
42
- self.broadcast_messages(agents, [previous_plan])
43
- logger.info("", f"Initial Plan:\n{previous_plan.content}", Fore.BLUE)
44
- for i in range(self.max_inner_turns):
45
- reviews: List[CriticMessage] = await asyncio.gather(
46
- *[
47
- agent.astep(previous_plan, advice, task_description)
48
- for agent in agents[1:]
49
- ]
50
- )
51
- logger.info(
52
- "",
53
- "Reviews:\n"
54
- + "\n".join(
55
- [f"[{review.sender}]: {review.content}" for review in reviews]
56
- ),
57
- Fore.YELLOW,
58
- )
59
-
60
- nonempty_reviews = []
61
- for review in reviews:
62
- if not review.is_agree and review.content != "":
63
- nonempty_reviews.append(review)
64
- if len(nonempty_reviews) == 0:
65
- logger.info("", "Consensus Reached!.", Fore.GREEN)
66
- break
67
- self.broadcast_messages(agents, nonempty_reviews)
68
- previous_plan = agents[0].step(previous_plan, advice, task_description)
69
- logger.info("", f"Updated Plan:\n{previous_plan.content}", Fore.BLUE)
70
- self.broadcast_messages(agents, [previous_plan])
71
- result = previous_plan
72
- return [result]
73
-
74
- def broadcast_messages(self, agents, messages) -> None:
75
- for agent in agents:
76
- agent.add_message_to_memory(messages)
77
-
78
- def p2p_messages(self, agents, messages) -> None:
79
- agents[0].add_message_to_memory(messages)
80
- for message in messages:
81
- for agent in agents[1:]:
82
- if agent.name == message.sender:
83
- agent.add_message_to_memory(messages)
84
- break
85
-
86
- def reset(self):
87
- pass
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/gridbuttons/Factory.js DELETED
@@ -1,13 +0,0 @@
1
- import GridButtons from './GridButtons.js';
2
- import ObjectFactory from '../ObjectFactory.js';
3
- import SetValue from '../../../plugins/utils/object/SetValue.js';
4
-
5
- ObjectFactory.register('gridButtons', function (config) {
6
- var gameObject = new GridButtons(this.scene, config);
7
- this.scene.add.existing(gameObject);
8
- return gameObject;
9
- });
10
-
11
- SetValue(window, 'RexPlugins.UI.GridButtons', GridButtons);
12
-
13
- export default GridButtons;
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AlexWang/lama/bin/train.py DELETED
@@ -1,72 +0,0 @@
1
- #!/usr/bin/env python3
2
-
3
- import logging
4
- import os
5
- import sys
6
- import traceback
7
-
8
- os.environ['OMP_NUM_THREADS'] = '1'
9
- os.environ['OPENBLAS_NUM_THREADS'] = '1'
10
- os.environ['MKL_NUM_THREADS'] = '1'
11
- os.environ['VECLIB_MAXIMUM_THREADS'] = '1'
12
- os.environ['NUMEXPR_NUM_THREADS'] = '1'
13
-
14
- import hydra
15
- from omegaconf import OmegaConf
16
- from pytorch_lightning import Trainer
17
- from pytorch_lightning.callbacks import ModelCheckpoint
18
- from pytorch_lightning.loggers import TensorBoardLogger
19
- from pytorch_lightning.plugins import DDPPlugin
20
-
21
- from saicinpainting.training.trainers import make_training_model
22
- from saicinpainting.utils import register_debug_signal_handlers, handle_ddp_subprocess, handle_ddp_parent_process, \
23
- handle_deterministic_config
24
-
25
- LOGGER = logging.getLogger(__name__)
26
-
27
-
28
- @handle_ddp_subprocess()
29
- @hydra.main(config_path='../configs/training', config_name='tiny_test.yaml')
30
- def main(config: OmegaConf):
31
- try:
32
- need_set_deterministic = handle_deterministic_config(config)
33
-
34
- register_debug_signal_handlers() # kill -10 <pid> will result in traceback dumped into log
35
-
36
- is_in_ddp_subprocess = handle_ddp_parent_process()
37
-
38
- config.visualizer.outdir = os.path.join(os.getcwd(), config.visualizer.outdir)
39
- if not is_in_ddp_subprocess:
40
- LOGGER.info(OmegaConf.to_yaml(config))
41
- OmegaConf.save(config, os.path.join(os.getcwd(), 'config.yaml'))
42
-
43
- checkpoints_dir = os.path.join(os.getcwd(), 'models')
44
- os.makedirs(checkpoints_dir, exist_ok=True)
45
-
46
- # there is no need to suppress this logger in ddp, because it handles rank on its own
47
- metrics_logger = TensorBoardLogger(config.location.tb_dir, name=os.path.basename(os.getcwd()))
48
- metrics_logger.log_hyperparams(config)
49
-
50
- training_model = make_training_model(config)
51
-
52
- trainer_kwargs = OmegaConf.to_container(config.trainer.kwargs, resolve=True)
53
- if need_set_deterministic:
54
- trainer_kwargs['deterministic'] = True
55
-
56
- trainer = Trainer(
57
- # there is no need to suppress checkpointing in ddp, because it handles rank on its own
58
- callbacks=ModelCheckpoint(dirpath=checkpoints_dir, **config.trainer.checkpoint_kwargs),
59
- logger=metrics_logger,
60
- default_root_dir=os.getcwd(),
61
- **trainer_kwargs
62
- )
63
- trainer.fit(training_model)
64
- except KeyboardInterrupt:
65
- LOGGER.warning('Interrupted by user')
66
- except Exception as ex:
67
- LOGGER.critical(f'Training failed due to {ex}:\n{traceback.format_exc()}')
68
- sys.exit(1)
69
-
70
-
71
- if __name__ == '__main__':
72
- main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AlhitawiMohammed22/CER_Hu-Evaluation-Metrics/eval_accuracy.py DELETED
File without changes
spaces/Amrrs/DragGan-Inversion/PTI/configs/__init__.py DELETED
File without changes
spaces/Amrrs/DragGan-Inversion/torch_utils/__init__.py DELETED
@@ -1,9 +0,0 @@
1
- # Copyright (c) 2021, NVIDIA CORPORATION & AFFILIATES. All rights reserved.
2
- #
3
- # NVIDIA CORPORATION and its licensors retain all intellectual property
4
- # and proprietary rights in and to this software, related documentation
5
- # and any modifications thereto. Any use, reproduction, disclosure or
6
- # distribution of this software and related documentation without an express
7
- # license agreement from NVIDIA CORPORATION is strictly prohibited.
8
-
9
- # empty
 
 
 
 
 
 
 
 
 
 
spaces/Andy1621/uniformer_image_detection/configs/legacy_1.x/cascade_mask_rcnn_r50_fpn_1x_coco_v1.py DELETED
@@ -1,79 +0,0 @@
1
- _base_ = [
2
- '../_base_/models/cascade_mask_rcnn_r50_fpn.py',
3
- '../_base_/datasets/coco_instance.py',
4
- '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
5
- ]
6
- model = dict(
7
- type='CascadeRCNN',
8
- pretrained='torchvision://resnet50',
9
- backbone=dict(
10
- type='ResNet',
11
- depth=50,
12
- num_stages=4,
13
- out_indices=(0, 1, 2, 3),
14
- frozen_stages=1,
15
- norm_cfg=dict(type='BN', requires_grad=True),
16
- norm_eval=True,
17
- style='pytorch'),
18
- neck=dict(
19
- type='FPN',
20
- in_channels=[256, 512, 1024, 2048],
21
- out_channels=256,
22
- num_outs=5),
23
- rpn_head=dict(
24
- anchor_generator=dict(type='LegacyAnchorGenerator', center_offset=0.5),
25
- bbox_coder=dict(
26
- type='LegacyDeltaXYWHBBoxCoder',
27
- target_means=[.0, .0, .0, .0],
28
- target_stds=[1.0, 1.0, 1.0, 1.0])),
29
- roi_head=dict(
30
- bbox_roi_extractor=dict(
31
- type='SingleRoIExtractor',
32
- roi_layer=dict(
33
- type='RoIAlign',
34
- output_size=7,
35
- sampling_ratio=2,
36
- aligned=False)),
37
- bbox_head=[
38
- dict(
39
- type='Shared2FCBBoxHead',
40
- reg_class_agnostic=True,
41
- in_channels=256,
42
- fc_out_channels=1024,
43
- roi_feat_size=7,
44
- num_classes=80,
45
- bbox_coder=dict(
46
- type='LegacyDeltaXYWHBBoxCoder',
47
- target_means=[0., 0., 0., 0.],
48
- target_stds=[0.1, 0.1, 0.2, 0.2])),
49
- dict(
50
- type='Shared2FCBBoxHead',
51
- reg_class_agnostic=True,
52
- in_channels=256,
53
- fc_out_channels=1024,
54
- roi_feat_size=7,
55
- num_classes=80,
56
- bbox_coder=dict(
57
- type='LegacyDeltaXYWHBBoxCoder',
58
- target_means=[0., 0., 0., 0.],
59
- target_stds=[0.05, 0.05, 0.1, 0.1])),
60
- dict(
61
- type='Shared2FCBBoxHead',
62
- reg_class_agnostic=True,
63
- in_channels=256,
64
- fc_out_channels=1024,
65
- roi_feat_size=7,
66
- num_classes=80,
67
- bbox_coder=dict(
68
- type='LegacyDeltaXYWHBBoxCoder',
69
- target_means=[0., 0., 0., 0.],
70
- target_stds=[0.033, 0.033, 0.067, 0.067])),
71
- ],
72
- mask_roi_extractor=dict(
73
- type='SingleRoIExtractor',
74
- roi_layer=dict(
75
- type='RoIAlign',
76
- output_size=14,
77
- sampling_ratio=2,
78
- aligned=False))))
79
- dist_params = dict(backend='nccl', port=29515)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Andy1621/uniformer_image_detection/configs/mask_rcnn/mask_rcnn_r50_fpn_poly_1x_coco.py DELETED
@@ -1,23 +0,0 @@
1
- _base_ = [
2
- '../_base_/models/mask_rcnn_r50_fpn.py',
3
- '../_base_/datasets/coco_instance.py',
4
- '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
5
- ]
6
-
7
- img_norm_cfg = dict(
8
- mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
9
- train_pipeline = [
10
- dict(type='LoadImageFromFile'),
11
- dict(
12
- type='LoadAnnotations',
13
- with_bbox=True,
14
- with_mask=True,
15
- poly2mask=False),
16
- dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
17
- dict(type='RandomFlip', flip_ratio=0.5),
18
- dict(type='Normalize', **img_norm_cfg),
19
- dict(type='Pad', size_divisor=32),
20
- dict(type='DefaultFormatBundle'),
21
- dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
22
- ]
23
- data = dict(train=dict(pipeline=train_pipeline))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Andy1621/uniformer_image_detection/configs/scnet/scnet_x101_64x4d_fpn_8x1_20e_coco.py DELETED
@@ -1,3 +0,0 @@
1
- _base_ = './scnet_x101_64x4d_fpn_20e_coco.py'
2
- data = dict(samples_per_gpu=1, workers_per_gpu=1)
3
- optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
 
 
 
 
spaces/Anon4review/HIPTDemo/README.md DELETED
@@ -1,37 +0,0 @@
1
- ---
2
- title: HIPTDemo
3
- emoji: 🏢
4
- colorFrom: green
5
- colorTo: blue
6
- sdk: gradio
7
- app_file: app.py
8
- pinned: false
9
- ---
10
-
11
- # Configuration
12
-
13
- `title`: _string_
14
- Display title for the Space
15
-
16
- `emoji`: _string_
17
- Space emoji (emoji-only character allowed)
18
-
19
- `colorFrom`: _string_
20
- Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
21
-
22
- `colorTo`: _string_
23
- Color for Thumbnail gradient (red, yellow, green, blue, indigo, purple, pink, gray)
24
-
25
- `sdk`: _string_
26
- Can be either `gradio` or `streamlit`
27
-
28
- `sdk_version` : _string_
29
- Only applicable for `streamlit` SDK.
30
- See [doc](https://hf.co/docs/hub/spaces) for more info on supported versions.
31
-
32
- `app_file`: _string_
33
- Path to your main application file (which contains either `gradio` or `streamlit` Python code).
34
- Path is relative to the root of the repository.
35
-
36
- `pinned`: _boolean_
37
- Whether the Space stays on top of your list.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Anonymous-123/ImageNet-Editing/editing_diffusion/guided_diffusion/datasets/lsun_bedroom.py DELETED
@@ -1,54 +0,0 @@
1
- """
2
- Convert an LSUN lmdb database into a directory of images.
3
- """
4
-
5
- import argparse
6
- import io
7
- import os
8
-
9
- from PIL import Image
10
- import lmdb
11
- import numpy as np
12
-
13
-
14
- def read_images(lmdb_path, image_size):
15
- env = lmdb.open(lmdb_path, map_size=1099511627776, max_readers=100, readonly=True)
16
- with env.begin(write=False) as transaction:
17
- cursor = transaction.cursor()
18
- for _, webp_data in cursor:
19
- img = Image.open(io.BytesIO(webp_data))
20
- width, height = img.size
21
- scale = image_size / min(width, height)
22
- img = img.resize(
23
- (int(round(scale * width)), int(round(scale * height))),
24
- resample=Image.BOX,
25
- )
26
- arr = np.array(img)
27
- h, w, _ = arr.shape
28
- h_off = (h - image_size) // 2
29
- w_off = (w - image_size) // 2
30
- arr = arr[h_off : h_off + image_size, w_off : w_off + image_size]
31
- yield arr
32
-
33
-
34
- def dump_images(out_dir, images, prefix):
35
- if not os.path.exists(out_dir):
36
- os.mkdir(out_dir)
37
- for i, img in enumerate(images):
38
- Image.fromarray(img).save(os.path.join(out_dir, f"{prefix}_{i:07d}.png"))
39
-
40
-
41
- def main():
42
- parser = argparse.ArgumentParser()
43
- parser.add_argument("--image-size", help="new image size", type=int, default=256)
44
- parser.add_argument("--prefix", help="class name", type=str, default="bedroom")
45
- parser.add_argument("lmdb_path", help="path to an LSUN lmdb database")
46
- parser.add_argument("out_dir", help="path to output directory")
47
- args = parser.parse_args()
48
-
49
- images = read_images(args.lmdb_path, args.image_size)
50
- dump_images(args.out_dir, images, args.prefix)
51
-
52
-
53
- if __name__ == "__main__":
54
- main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Arnx/MusicGenXvAKN/audiocraft/modules/codebooks_patterns.py DELETED
@@ -1,539 +0,0 @@
1
- # Copyright (c) Meta Platforms, Inc. and affiliates.
2
- # All rights reserved.
3
- #
4
- # This source code is licensed under the license found in the
5
- # LICENSE file in the root directory of this source tree.
6
-
7
- from collections import namedtuple
8
- from dataclasses import dataclass
9
- from functools import lru_cache
10
- import logging
11
- import typing as tp
12
-
13
- from abc import ABC, abstractmethod
14
- import torch
15
-
16
- LayoutCoord = namedtuple('LayoutCoord', ['t', 'q']) # (timestep, codebook index)
17
- PatternLayout = tp.List[tp.List[LayoutCoord]] # Sequence of coordinates
18
- logger = logging.getLogger(__name__)
19
-
20
-
21
- @dataclass
22
- class Pattern:
23
- """Base implementation of a pattern over a sequence with multiple codebooks.
24
-
25
- The codebook pattern consists in a layout, defining for each sequence step
26
- the list of coordinates of each codebook timestep in the resulting interleaved sequence.
27
- The first item of the pattern is always an empty list in order to properly insert a special token
28
- to start with. For convenience, we also keep track of ``n_q`` the number of codebooks used for the pattern
29
- and ``timesteps`` the number of timesteps corresponding to the original sequence.
30
-
31
- The pattern provides convenient methods to build and revert interleaved sequences from it:
32
- ``build_pattern_sequence`` maps a given a dense input tensor of multi-codebook sequence from [B, K, T]
33
- to the interleaved sequence of shape [B, K, S] applying the pattern, with S being the batch size,
34
- K being the number of codebooks, T the number of original timesteps and S the number of sequence steps
35
- for the output sequence. The unfilled positions are replaced with a special token and the built sequence
36
- is returned along with a mask indicating valid tokens.
37
- ``revert_pattern_sequence`` maps back an interleaved sequence of shape [B, K, S] to the original alignment
38
- of codebooks across timesteps to an output tensor of shape [B, K, T], using again a special token and a mask
39
- to fill and specify invalid positions if needed.
40
- See the dedicated methods for more details.
41
- """
42
- # Pattern layout, for each sequence step, we have a list of coordinates
43
- # corresponding to the original codebook timestep and position.
44
- # The first list is always an empty list in order to properly insert
45
- # a special token to start with.
46
- layout: PatternLayout
47
- timesteps: int
48
- n_q: int
49
-
50
- def __post_init__(self):
51
- assert len(self.layout) > 0
52
- assert self.layout[0] == []
53
- self._validate_layout()
54
- self._build_reverted_sequence_scatter_indexes = lru_cache(100)(self._build_reverted_sequence_scatter_indexes)
55
- self._build_pattern_sequence_scatter_indexes = lru_cache(100)(self._build_pattern_sequence_scatter_indexes)
56
- logger.info("New pattern, time steps: %d, sequence steps: %d", self.timesteps, len(self.layout))
57
-
58
- def _validate_layout(self):
59
- """Runs checks on the layout to ensure a valid pattern is defined.
60
- A pattern is considered invalid if:
61
- - Multiple timesteps for a same codebook are defined in the same sequence step
62
- - The timesteps for a given codebook are not in ascending order as we advance in the sequence
63
- (this would mean that we have future timesteps before past timesteps).
64
- """
65
- q_timesteps = {q: 0 for q in range(self.n_q)}
66
- for s, seq_coords in enumerate(self.layout):
67
- if len(seq_coords) > 0:
68
- qs = set()
69
- for coord in seq_coords:
70
- qs.add(coord.q)
71
- last_q_timestep = q_timesteps[coord.q]
72
- assert coord.t >= last_q_timestep, \
73
- f"Past timesteps are found in the sequence for codebook = {coord.q} at step {s}"
74
- q_timesteps[coord.q] = coord.t
75
- # each sequence step contains at max 1 coordinate per codebook
76
- assert len(qs) == len(seq_coords), \
77
- f"Multiple entries for a same codebook are found at step {s}"
78
-
79
- @property
80
- def num_sequence_steps(self):
81
- return len(self.layout) - 1
82
-
83
- @property
84
- def max_delay(self):
85
- max_t_in_seq_coords = 0
86
- for seq_coords in self.layout[1:]:
87
- for coords in seq_coords:
88
- max_t_in_seq_coords = max(max_t_in_seq_coords, coords.t + 1)
89
- return max_t_in_seq_coords - self.timesteps
90
-
91
- @property
92
- def valid_layout(self):
93
- valid_step = len(self.layout) - self.max_delay
94
- return self.layout[:valid_step]
95
-
96
- def get_sequence_coords_with_timestep(self, t: int, q: tp.Optional[int] = None):
97
- """Get codebook coordinates in the layout that corresponds to the specified timestep t
98
- and optionally to the codebook q. Coordinates are returned as a tuple with the sequence step
99
- and the actual codebook coordinates.
100
- """
101
- assert t <= self.timesteps, "provided timesteps is greater than the pattern's number of timesteps"
102
- if q is not None:
103
- assert q <= self.n_q, "provided number of codebooks is greater than the pattern's number of codebooks"
104
- coords = []
105
- for s, seq_codes in enumerate(self.layout):
106
- for code in seq_codes:
107
- if code.t == t and (q is None or code.q == q):
108
- coords.append((s, code))
109
- return coords
110
-
111
- def get_steps_with_timestep(self, t: int, q: tp.Optional[int] = None) -> tp.List[int]:
112
- return [step for step, coords in self.get_sequence_coords_with_timestep(t, q)]
113
-
114
- def get_first_step_with_timesteps(self, t: int, q: tp.Optional[int] = None) -> tp.Optional[int]:
115
- steps_with_timesteps = self.get_steps_with_timestep(t, q)
116
- return steps_with_timesteps[0] if len(steps_with_timesteps) > 0 else None
117
-
118
- def _build_pattern_sequence_scatter_indexes(self, timesteps: int, n_q: int, keep_only_valid_steps: bool,
119
- device: tp.Union[torch.device, str] = 'cpu'):
120
- """Build scatter indexes corresponding to the pattern, up to the provided sequence_steps.
121
-
122
- Args:
123
- timesteps (int): Maximum number of timesteps steps to consider.
124
- keep_only_valid_steps (bool): Restrict the pattern layout to match only valid steps.
125
- device (Union[torch.device, str]): Device for created tensors.
126
- Returns:
127
- indexes (torch.Tensor): Indexes corresponding to the sequence, of shape [K, S].
128
- mask (torch.Tensor): Mask corresponding to indexes that matches valid indexes, of shape [K, S].
129
- """
130
- assert n_q == self.n_q, f"invalid number of codebooks for the sequence and the pattern: {n_q} != {self.n_q}"
131
- assert timesteps <= self.timesteps, "invalid number of timesteps used to build the sequence from the pattern"
132
- # use the proper layout based on whether we limit ourselves to valid steps only or not,
133
- # note that using the valid_layout will result in a truncated sequence up to the valid steps
134
- ref_layout = self.valid_layout if keep_only_valid_steps else self.layout
135
- # single item indexing being super slow with pytorch vs. numpy, so we use numpy here
136
- indexes = torch.zeros(n_q, len(ref_layout), dtype=torch.long).numpy()
137
- mask = torch.zeros(n_q, len(ref_layout), dtype=torch.bool).numpy()
138
- # fill indexes with last sequence step value that will correspond to our special token
139
- # the last value is n_q * timesteps as we have flattened z and append special token as the last token
140
- # which will correspond to the index: n_q * timesteps
141
- indexes[:] = n_q * timesteps
142
- # iterate over the pattern and fill scattered indexes and mask
143
- for s, sequence_coords in enumerate(ref_layout):
144
- for coords in sequence_coords:
145
- if coords.t < timesteps:
146
- indexes[coords.q, s] = coords.t + coords.q * timesteps
147
- mask[coords.q, s] = 1
148
- indexes = torch.from_numpy(indexes).to(device)
149
- mask = torch.from_numpy(mask).to(device)
150
- return indexes, mask
151
-
152
- def build_pattern_sequence(self, z: torch.Tensor, special_token: int, keep_only_valid_steps: bool = False):
153
- """Build sequence corresponding to the pattern from the input tensor z.
154
- The sequence is built using up to sequence_steps if specified, and non-pattern
155
- coordinates are filled with the special token.
156
-
157
- Args:
158
- z (torch.Tensor): Input tensor of multi-codebooks sequence, of shape [B, K, T].
159
- special_token (int): Special token used to fill non-pattern coordinates in the new sequence.
160
- keep_only_valid_steps (bool): Build a sequence from the pattern up to valid (= fully defined) steps.
161
- Steps that are beyond valid steps will be replaced by the special_token in that case.
162
- Returns:
163
- values (torch.Tensor): Interleaved sequence matching the pattern, of shape [B, K, S] with S
164
- corresponding either to the sequence_steps if provided, otherwise to the length of the pattern.
165
- indexes (torch.Tensor): Indexes corresponding to the interleaved sequence, of shape [K, S].
166
- mask (torch.Tensor): Mask corresponding to indexes that matches valid indexes of shape [K, S].
167
- """
168
- B, K, T = z.shape
169
- indexes, mask = self._build_pattern_sequence_scatter_indexes(
170
- T, K, keep_only_valid_steps=keep_only_valid_steps, device=str(z.device)
171
- )
172
- z = z.view(B, -1)
173
- # we append the special token as the last index of our flattened z tensor
174
- z = torch.cat([z, torch.zeros_like(z[:, :1]) + special_token], dim=1)
175
- values = z[:, indexes.view(-1)]
176
- values = values.view(B, K, indexes.shape[-1])
177
- return values, indexes, mask
178
-
179
- def _build_reverted_sequence_scatter_indexes(self, sequence_steps: int, n_q: int,
180
- keep_only_valid_steps: bool = False,
181
- is_model_output: bool = False,
182
- device: tp.Union[torch.device, str] = 'cpu'):
183
- """Builds scatter indexes required to retrieve the original multi-codebook sequence
184
- from interleaving pattern.
185
-
186
- Args:
187
- sequence_steps (int): Sequence steps.
188
- n_q (int): Number of codebooks.
189
- keep_only_valid_steps (bool): Build a sequence from the pattern up to valid (= fully defined) steps.
190
- Steps that are beyond valid steps will be replaced by the special_token in that case.
191
- is_model_output (bool): Whether to keep the sequence item corresponding to initial special token or not.
192
- device (Union[torch.device, str]): Device for created tensors.
193
- Returns:
194
- torch.Tensor: Indexes for reconstructing the output, of shape [K, T].
195
- mask (torch.Tensor): Mask corresponding to indexes that matches valid indexes of shape [K, T].
196
- """
197
- ref_layout = self.valid_layout if keep_only_valid_steps else self.layout
198
- # TODO(jade): Do we want to further truncate to only valid timesteps here as well?
199
- timesteps = self.timesteps
200
- assert n_q == self.n_q, f"invalid number of codebooks for the sequence and the pattern: {n_q} != {self.n_q}"
201
- assert sequence_steps <= len(ref_layout), \
202
- f"sequence to revert is longer than the defined pattern: {sequence_steps} > {len(ref_layout)}"
203
-
204
- # ensure we take the appropriate indexes to keep the model output from the first special token as well
205
- if is_model_output:
206
- ref_layout = ref_layout[1:]
207
-
208
- # single item indexing being super slow with pytorch vs. numpy, so we use numpy here
209
- indexes = torch.zeros(n_q, timesteps, dtype=torch.long).numpy()
210
- mask = torch.zeros(n_q, timesteps, dtype=torch.bool).numpy()
211
- # fill indexes with last sequence step value that will correspond to our special token
212
- indexes[:] = n_q * sequence_steps
213
- for s, sequence_codes in enumerate(ref_layout):
214
- if s < sequence_steps:
215
- for code in sequence_codes:
216
- if code.t < timesteps:
217
- indexes[code.q, code.t] = s + code.q * sequence_steps
218
- mask[code.q, code.t] = 1
219
- indexes = torch.from_numpy(indexes).to(device)
220
- mask = torch.from_numpy(mask).to(device)
221
- return indexes, mask
222
-
223
- def revert_pattern_sequence(self, s: torch.Tensor, special_token: int, keep_only_valid_steps: bool = False):
224
- """Revert a sequence built from the pattern back to the original multi-codebook sequence without interleaving.
225
- The sequence is reverted using up to timesteps if specified, and non-pattern coordinates
226
- are filled with the special token.
227
-
228
- Args:
229
- s (torch.Tensor): Interleaved sequence tensor obtained from the pattern, of shape [B, K, S].
230
- special_token (int or float): Special token used to fill non-pattern coordinates in the new sequence.
231
- Returns:
232
- values (torch.Tensor): Interleaved sequence matching the pattern, of shape [B, K, T] with T
233
- corresponding either to the timesteps if provided, or the total timesteps in pattern otherwise.
234
- indexes (torch.Tensor): Indexes corresponding to the interleaved sequence, of shape [K, T].
235
- mask (torch.Tensor): Mask corresponding to indexes that matches valid indexes of shape [K, T].
236
- """
237
- B, K, S = s.shape
238
- indexes, mask = self._build_reverted_sequence_scatter_indexes(
239
- S, K, keep_only_valid_steps, is_model_output=False, device=str(s.device)
240
- )
241
- s = s.view(B, -1)
242
- # we append the special token as the last index of our flattened z tensor
243
- s = torch.cat([s, torch.zeros_like(s[:, :1]) + special_token], dim=1)
244
- values = s[:, indexes.view(-1)]
245
- values = values.view(B, K, indexes.shape[-1])
246
- return values, indexes, mask
247
-
248
- def revert_pattern_logits(self, logits: torch.Tensor, special_token: float, keep_only_valid_steps: bool = False):
249
- """Revert model logits obtained on a sequence built from the pattern
250
- back to a tensor matching the original sequence.
251
-
252
- This method is similar to ``revert_pattern_sequence`` with the following specificities:
253
- 1. It is designed to work with the extra cardinality dimension
254
- 2. We return the logits for the first sequence item that matches the special_token and
255
- which matching target in the original sequence is the first item of the sequence,
256
- while we skip the last logits as there is no matching target
257
- """
258
- B, card, K, S = logits.shape
259
- indexes, mask = self._build_reverted_sequence_scatter_indexes(
260
- S, K, keep_only_valid_steps, is_model_output=True, device=logits.device
261
- )
262
- logits = logits.reshape(B, card, -1)
263
- # we append the special token as the last index of our flattened z tensor
264
- logits = torch.cat([logits, torch.zeros_like(logits[:, :, :1]) + special_token], dim=-1) # [B, card, K x S]
265
- values = logits[:, :, indexes.view(-1)]
266
- values = values.view(B, card, K, indexes.shape[-1])
267
- return values, indexes, mask
268
-
269
-
270
- class CodebooksPatternProvider(ABC):
271
- """Abstraction around providing pattern for interleaving codebooks.
272
-
273
- The CodebooksPatternProvider abstraction allows to implement various strategies to
274
- define interleaving pattern of sequences composed of multiple codebooks. For a given
275
- number of codebooks `n_q`, the pattern provider can generate a specified pattern
276
- corresponding to a sequence of `T` timesteps with `n_q` parallel codebooks. This pattern
277
- can be used to construct a new sequence from the original codes respecting the specified
278
- pattern. The pattern is defined as a list of list of code coordinates, code coordinate
279
- being a tuple with the original timestep and codebook to build the new sequence.
280
- Note that all patterns must start with an empty list that is then used to insert a first
281
- sequence step of special tokens in the newly generated sequence.
282
-
283
- Args:
284
- n_q (int): number of codebooks.
285
- cached (bool): if True, patterns for a given length are cached. In general
286
- that should be true for efficiency reason to avoid synchronization points.
287
- """
288
- def __init__(self, n_q: int, cached: bool = True):
289
- assert n_q > 0
290
- self.n_q = n_q
291
- self.get_pattern = lru_cache(100)(self.get_pattern) # type: ignore
292
-
293
- @abstractmethod
294
- def get_pattern(self, timesteps: int) -> Pattern:
295
- """Builds pattern with specific interleaving between codebooks.
296
-
297
- Args:
298
- timesteps (int): Total numer of timesteps.
299
- """
300
- raise NotImplementedError()
301
-
302
-
303
- class DelayedPatternProvider(CodebooksPatternProvider):
304
- """Provider for delayed pattern across delayed codebooks.
305
- Codebooks are delayed in the sequence and sequence steps will contain codebooks
306
- from different timesteps.
307
-
308
- Example:
309
- Taking timesteps=4 and n_q=3, delays=None, the multi-codebook sequence:
310
- [[1, 2, 3, 4],
311
- [1, 2, 3, 4],
312
- [1, 2, 3, 4]]
313
- The resulting sequence obtained from the returned pattern is:
314
- [[S, 1, 2, 3, 4],
315
- [S, S, 1, 2, 3],
316
- [S, S, S, 1, 2]]
317
- (with S being a special token)
318
-
319
- Args:
320
- n_q (int): Number of codebooks.
321
- delays (Optional[List[int]]): Delay for each of the codebooks.
322
- If delays not defined, each codebook is delayed by 1 compared to the previous one.
323
- flatten_first (int): Flatten the first N timesteps.
324
- empty_initial (int): Prepend with N empty list of coordinates.
325
- """
326
- def __init__(self, n_q: int, delays: tp.Optional[tp.List[int]] = None,
327
- flatten_first: int = 0, empty_initial: int = 0):
328
- super().__init__(n_q)
329
- if delays is None:
330
- delays = list(range(n_q))
331
- self.delays = delays
332
- self.flatten_first = flatten_first
333
- self.empty_initial = empty_initial
334
- assert len(self.delays) == self.n_q
335
- assert sorted(self.delays) == self.delays
336
-
337
- def get_pattern(self, timesteps: int) -> Pattern:
338
- out: PatternLayout = [[]]
339
- max_delay = max(self.delays)
340
- if self.empty_initial:
341
- out += [[] for _ in range(self.empty_initial)]
342
- if self.flatten_first:
343
- for t in range(min(timesteps, self.flatten_first)):
344
- for q in range(self.n_q):
345
- out.append([LayoutCoord(t, q)])
346
- for t in range(self.flatten_first, timesteps + max_delay):
347
- v = []
348
- for q, delay in enumerate(self.delays):
349
- t_for_q = t - delay
350
- if t_for_q >= self.flatten_first:
351
- v.append(LayoutCoord(t_for_q, q))
352
- out.append(v)
353
- return Pattern(out, n_q=self.n_q, timesteps=timesteps)
354
-
355
-
356
- class ParallelPatternProvider(DelayedPatternProvider):
357
- """Provider for parallel pattern across codebooks.
358
- This pattern provider is a special case of the delayed pattern with actually no delay,
359
- hence delays=repeat(0, n_q).
360
-
361
- Args:
362
- n_q (int): Number of codebooks.
363
- """
364
- def __init__(self, n_q: int):
365
- super().__init__(n_q, [0] * n_q)
366
-
367
-
368
- class UnrolledPatternProvider(CodebooksPatternProvider):
369
- """Provider for unrolling codebooks pattern.
370
- This pattern provider enables to represent the codebook flattened completely or only to some extend
371
- while also specifying a given delay between the flattened codebooks representation, allowing to
372
- unroll the codebooks in the sequence.
373
-
374
- Example:
375
- 1. Flattening of the codebooks.
376
- By default, the pattern provider will fully flatten the codebooks such as flattening=range(n_q),
377
- taking n_q = 3 and timesteps = 4:
378
- [[1, 2, 3, 4],
379
- [1, 2, 3, 4],
380
- [1, 2, 3, 4]]
381
- will result into:
382
- [[S, S, 1, S, S, 2, S, S, 3, S, S, 4],
383
- [S, 1, S, S, 2, S, S, 3, S, S, 4, S],
384
- [1, S, S, 2, S, S, 3, S, S, 4, S, S]]
385
- 2. Partial flattening of the codebooks. The ``flattening`` parameter allows to specify the inner step
386
- for each of the codebook, allowing to define which codebook to flatten (or keep in parallel), for example
387
- taking n_q = 3, timesteps = 4 and flattening = [0, 1, 1]:
388
- [[1, 2, 3, 4],
389
- [1, 2, 3, 4],
390
- [1, 2, 3, 4]]
391
- will result into:
392
- [[S, 1, S, S, 2, S, S, 3, S, S, 4, S],
393
- [S, 1, S, S, 2, S, S, 3, S, S, 4, S],
394
- [1, S, S, 2, S, S, 3, S, S, 4, S, S]]
395
- 3. Flattening with delay. The ``delay`` parameter allows to further unroll the sequence of codebooks
396
- allowing to specify the delay per codebook. Note that the delay between codebooks flattened to the
397
- same inner timestep should be coherent. For example, taking n_q = 3, timesteps = 4, flattening = [0, 1, 1]
398
- and delays = [0, 3, 3]:
399
- [[1, 2, 3, 4],
400
- [1, 2, 3, 4],
401
- [1, 2, 3, 4]]
402
- will result into:
403
- [[S, S, S, 1, S, 2, S, 3, S, 4],
404
- [S, S, S, 1, S, 2, S, 3, S, 4],
405
- [1, 2, 3, S, 4, S, 5, S, 6, S]]
406
-
407
- Args:
408
- n_q (int): Number of codebooks.
409
- flattening (Optional[List[int]]): Flattening schema over the codebooks. If not defined,
410
- the codebooks will be flattened to 1 codebook per step, meaning that the sequence will
411
- have n_q extra steps for each timestep.
412
- delays (Optional[List[int]]): Delay for each of the codebooks. If not defined,
413
- no delay is added and therefore will default to [0] * ``n_q``.
414
- Note that two codebooks that will be flattened to the same inner step
415
- should have the same delay, otherwise the pattern is considered as invalid.
416
- """
417
- FlattenedCodebook = namedtuple('FlattenedCodebook', ['codebooks', 'delay'])
418
-
419
- def __init__(self, n_q: int, flattening: tp.Optional[tp.List[int]] = None,
420
- delays: tp.Optional[tp.List[int]] = None):
421
- super().__init__(n_q)
422
- if flattening is None:
423
- flattening = list(range(n_q))
424
- if delays is None:
425
- delays = [0] * n_q
426
- assert len(flattening) == n_q
427
- assert len(delays) == n_q
428
- assert sorted(flattening) == flattening
429
- assert sorted(delays) == delays
430
- self._flattened_codebooks = self._build_flattened_codebooks(delays, flattening)
431
- self.max_delay = max(delays)
432
-
433
- def _build_flattened_codebooks(self, delays: tp.List[int], flattening: tp.List[int]):
434
- """Build a flattened codebooks representation as a dictionary of inner step
435
- and the actual codebook indices corresponding to the flattened codebook. For convenience, we
436
- also store the delay associated to the flattened codebook to avoid maintaining an extra mapping.
437
- """
438
- flattened_codebooks: dict = {}
439
- for q, (inner_step, delay) in enumerate(zip(flattening, delays)):
440
- if inner_step not in flattened_codebooks:
441
- flat_codebook = UnrolledPatternProvider.FlattenedCodebook(codebooks=[q], delay=delay)
442
- else:
443
- flat_codebook = flattened_codebooks[inner_step]
444
- assert flat_codebook.delay == delay, (
445
- "Delay and flattening between codebooks is inconsistent: ",
446
- "two codebooks flattened to the same position should have the same delay."
447
- )
448
- flat_codebook.codebooks.append(q)
449
- flattened_codebooks[inner_step] = flat_codebook
450
- return flattened_codebooks
451
-
452
- @property
453
- def _num_inner_steps(self):
454
- """Number of inner steps to unroll between timesteps in order to flatten the codebooks.
455
- """
456
- return max([inner_step for inner_step in self._flattened_codebooks.keys()]) + 1
457
-
458
- def num_virtual_steps(self, timesteps: int) -> int:
459
- return timesteps * self._num_inner_steps + 1
460
-
461
- def get_pattern(self, timesteps: int) -> Pattern:
462
- """Builds pattern for delay across codebooks.
463
-
464
- Args:
465
- timesteps (int): Total numer of timesteps.
466
- """
467
- # the PatternLayout is built as a tuple of sequence position and list of coordinates
468
- # so that it can be reordered properly given the required delay between codebooks of given timesteps
469
- indexed_out: list = [(-1, [])]
470
- max_timesteps = timesteps + self.max_delay
471
- for t in range(max_timesteps):
472
- # for each timestep, we unroll the flattened codebooks,
473
- # emitting the sequence step with the corresponding delay
474
- for step in range(self._num_inner_steps):
475
- if step in self._flattened_codebooks:
476
- # we have codebooks at this virtual step to emit
477
- step_codebooks = self._flattened_codebooks[step]
478
- t_for_q = t + step_codebooks.delay
479
- coords = [LayoutCoord(t, q) for q in step_codebooks.codebooks]
480
- if t_for_q < max_timesteps and t < max_timesteps:
481
- indexed_out.append((t_for_q, coords))
482
- else:
483
- # there is no codebook in this virtual step so we emit an empty list
484
- indexed_out.append((t, []))
485
- out = [coords for _, coords in sorted(indexed_out)]
486
- return Pattern(out, n_q=self.n_q, timesteps=timesteps)
487
-
488
-
489
- class VALLEPattern(CodebooksPatternProvider):
490
- """Almost VALL-E style pattern. We futher allow some delays for the
491
- codebooks other than the first one.
492
-
493
- Args:
494
- n_q (int): Number of codebooks.
495
- delays (Optional[List[int]]): Delay for each of the codebooks.
496
- If delays not defined, each codebook is delayed by 1 compared to the previous one.
497
- """
498
- def __init__(self, n_q: int, delays: tp.Optional[tp.List[int]] = None):
499
- super().__init__(n_q)
500
- if delays is None:
501
- delays = [0] * (n_q - 1)
502
- self.delays = delays
503
- assert len(self.delays) == self.n_q - 1
504
- assert sorted(self.delays) == self.delays
505
-
506
- def get_pattern(self, timesteps: int) -> Pattern:
507
- out: PatternLayout = [[]]
508
- for t in range(timesteps):
509
- out.append([LayoutCoord(t, 0)])
510
- max_delay = max(self.delays)
511
- for t in range(timesteps + max_delay):
512
- v = []
513
- for q, delay in enumerate(self.delays):
514
- t_for_q = t - delay
515
- if t_for_q >= 0:
516
- v.append(LayoutCoord(t_for_q, q + 1))
517
- out.append(v)
518
- return Pattern(out, n_q=self.n_q, timesteps=timesteps)
519
-
520
-
521
- class MusicLMPattern(CodebooksPatternProvider):
522
- """Almost MusicLM style pattern. This is equivalent to full flattening
523
- but in a different order.
524
-
525
- Args:
526
- n_q (int): Number of codebooks.
527
- group_by (int): Number of codebooks to group together.
528
- """
529
- def __init__(self, n_q: int, group_by: int = 2):
530
- super().__init__(n_q)
531
- self.group_by = group_by
532
-
533
- def get_pattern(self, timesteps: int) -> Pattern:
534
- out: PatternLayout = [[]]
535
- for offset in range(0, self.n_q, self.group_by):
536
- for t in range(timesteps):
537
- for q in range(offset, offset + self.group_by):
538
- out.append([LayoutCoord(t, q)])
539
- return Pattern(out, n_q=self.n_q, timesteps=timesteps)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_internal/commands/completion.py DELETED
@@ -1,126 +0,0 @@
1
- import sys
2
- import textwrap
3
- from optparse import Values
4
- from typing import List
5
-
6
- from pip._internal.cli.base_command import Command
7
- from pip._internal.cli.status_codes import SUCCESS
8
- from pip._internal.utils.misc import get_prog
9
-
10
- BASE_COMPLETION = """
11
- # pip {shell} completion start{script}# pip {shell} completion end
12
- """
13
-
14
- COMPLETION_SCRIPTS = {
15
- "bash": """
16
- _pip_completion()
17
- {{
18
- COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
19
- COMP_CWORD=$COMP_CWORD \\
20
- PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
21
- }}
22
- complete -o default -F _pip_completion {prog}
23
- """,
24
- "zsh": """
25
- function _pip_completion {{
26
- local words cword
27
- read -Ac words
28
- read -cn cword
29
- reply=( $( COMP_WORDS="$words[*]" \\
30
- COMP_CWORD=$(( cword-1 )) \\
31
- PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
32
- }}
33
- compctl -K _pip_completion {prog}
34
- """,
35
- "fish": """
36
- function __fish_complete_pip
37
- set -lx COMP_WORDS (commandline -o) ""
38
- set -lx COMP_CWORD ( \\
39
- math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
40
- )
41
- set -lx PIP_AUTO_COMPLETE 1
42
- string split \\ -- (eval $COMP_WORDS[1])
43
- end
44
- complete -fa "(__fish_complete_pip)" -c {prog}
45
- """,
46
- "powershell": """
47
- if ((Test-Path Function:\\TabExpansion) -and -not `
48
- (Test-Path Function:\\_pip_completeBackup)) {{
49
- Rename-Item Function:\\TabExpansion _pip_completeBackup
50
- }}
51
- function TabExpansion($line, $lastWord) {{
52
- $lastBlock = [regex]::Split($line, '[|;]')[-1].TrimStart()
53
- if ($lastBlock.StartsWith("{prog} ")) {{
54
- $Env:COMP_WORDS=$lastBlock
55
- $Env:COMP_CWORD=$lastBlock.Split().Length - 1
56
- $Env:PIP_AUTO_COMPLETE=1
57
- (& {prog}).Split()
58
- Remove-Item Env:COMP_WORDS
59
- Remove-Item Env:COMP_CWORD
60
- Remove-Item Env:PIP_AUTO_COMPLETE
61
- }}
62
- elseif (Test-Path Function:\\_pip_completeBackup) {{
63
- # Fall back on existing tab expansion
64
- _pip_completeBackup $line $lastWord
65
- }}
66
- }}
67
- """,
68
- }
69
-
70
-
71
- class CompletionCommand(Command):
72
- """A helper command to be used for command completion."""
73
-
74
- ignore_require_venv = True
75
-
76
- def add_options(self) -> None:
77
- self.cmd_opts.add_option(
78
- "--bash",
79
- "-b",
80
- action="store_const",
81
- const="bash",
82
- dest="shell",
83
- help="Emit completion code for bash",
84
- )
85
- self.cmd_opts.add_option(
86
- "--zsh",
87
- "-z",
88
- action="store_const",
89
- const="zsh",
90
- dest="shell",
91
- help="Emit completion code for zsh",
92
- )
93
- self.cmd_opts.add_option(
94
- "--fish",
95
- "-f",
96
- action="store_const",
97
- const="fish",
98
- dest="shell",
99
- help="Emit completion code for fish",
100
- )
101
- self.cmd_opts.add_option(
102
- "--powershell",
103
- "-p",
104
- action="store_const",
105
- const="powershell",
106
- dest="shell",
107
- help="Emit completion code for powershell",
108
- )
109
-
110
- self.parser.insert_option_group(0, self.cmd_opts)
111
-
112
- def run(self, options: Values, args: List[str]) -> int:
113
- """Prints the completion code of the given shell"""
114
- shells = COMPLETION_SCRIPTS.keys()
115
- shell_options = ["--" + shell for shell in sorted(shells)]
116
- if options.shell in shells:
117
- script = textwrap.dedent(
118
- COMPLETION_SCRIPTS.get(options.shell, "").format(prog=get_prog())
119
- )
120
- print(BASE_COMPLETION.format(script=script, shell=options.shell))
121
- return SUCCESS
122
- else:
123
- sys.stderr.write(
124
- "ERROR: You must pass {}\n".format(" or ".join(shell_options))
125
- )
126
- return SUCCESS
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Atualli/yoloxTeste/yoloxdetect2/utils/downloads.py DELETED
@@ -1,85 +0,0 @@
1
- from pathlib import Path
2
- import torch
3
- import urllib
4
- import requests
5
- import subprocess
6
-
7
- def attempt_download_from_hub(repo_id, hf_token=None):
8
- # https://github.com/fcakyon/yolov5-pip/blob/main/yolov5/utils/downloads.py
9
- from huggingface_hub import hf_hub_download, list_repo_files
10
- from huggingface_hub.utils._errors import RepositoryNotFoundError
11
- from huggingface_hub.utils._validators import HFValidationError
12
- try:
13
- repo_files = list_repo_files(repo_id=repo_id, repo_type='model', token=hf_token)
14
- model_file = [f for f in repo_files if f.endswith('.pth')][0]
15
- file = hf_hub_download(
16
- repo_id=repo_id,
17
- filename=model_file,
18
- repo_type='model',
19
- token=hf_token,
20
- )
21
- return file
22
- except (RepositoryNotFoundError, HFValidationError):
23
- return None
24
-
25
-
26
- def safe_download(file, url, url2=None, min_bytes=1E0, error_msg=''):
27
- import os
28
- # Attempts to download file from url or url2, checks and removes incomplete downloads < min_bytes
29
-
30
- file = Path(file)
31
- assert_msg = f"Downloaded file '{file}' does not exist or size is < min_bytes={min_bytes}"
32
- try: # url1
33
- torch.hub.download_url_to_file(url, str(file), progress=True) # pytorch download
34
- assert file.exists() and file.stat().st_size > min_bytes, assert_msg # check
35
- except Exception as e: # url2
36
- file.unlink(missing_ok=True) # remove partial downloads
37
- os.system(f"curl -L '{url2 or url}' -o '{file}' --retry 3 -C -") # curl download, retry and resume on fail
38
- finally:
39
- if not file.exists() or file.stat().st_size < min_bytes: # check
40
- file.unlink(missing_ok=True) # remove partial downloads
41
- raise Exception(error_msg or assert_msg) # raise informative error
42
-
43
- def attempt_download(file, repo='Megvii-BaseDetection/YOLOX', release='0.1.0'):
44
- def github_assets(repository, version='latest'):
45
- response = requests.get(f'https://api.github.com/repos/{repository}/releases/tags/{version}').json() # github api
46
- return response['tag_name'], [x['name'] for x in response['assets']] # tag, assets
47
-
48
- file = Path(str(file).strip().replace("'", ''))
49
- if not file.exists():
50
- # URL specified
51
- name = Path(urllib.parse.unquote(str(file))).name # decode '%2F' to '/' etc.
52
- if str(file).startswith(('http:/', 'https:/')): # download
53
- url = str(file).replace(':/', '://') # Pathlib turns :// -> :/
54
- file = name.split('?')[0] # parse authentication https://url.com/file.txt?auth...
55
- if Path(file).is_file():
56
- return file
57
- else:
58
- safe_download(file=file, url=url, min_bytes=1E5)
59
- return file
60
-
61
- # GitHub assets
62
- assets = [
63
- 'yolov6n.pt', 'yolov6s.pt', 'yolov6m.pt', 'yolov6l.pt',
64
- 'yolov6n6.pt', 'yolov6s6.pt', 'yolov6m6.pt', 'yolov6l6.pt']
65
- try:
66
- tag, assets = github_assets(repo, release)
67
- except Exception:
68
- try:
69
- tag, assets = github_assets(repo) # latest release
70
- except Exception:
71
- try:
72
- tag = subprocess.check_output('git tag', shell=True, stderr=subprocess.STDOUT).decode().split()[-1]
73
- except Exception:
74
- tag = release
75
-
76
- file.parent.mkdir(parents=True, exist_ok=True) # make parent dir (if required)
77
- if name in assets:
78
- safe_download(
79
- file,
80
- url=f'https://github.com/{repo}/releases/download/{tag}/{name}',
81
- url2=f'https://storage.googleapis.com/{repo}/{tag}/{name}', # backup url (optional)
82
- min_bytes=1E5,
83
- error_msg=f'{file} missing, try downloading from https://github.com/{repo}/releases/{tag}')
84
-
85
- return str(file)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Audio-AGI/AudioSep/app.py DELETED
@@ -1,82 +0,0 @@
1
- from pathlib import Path
2
- from threading import Thread
3
-
4
- import gdown
5
- import gradio as gr
6
- import librosa
7
- import numpy as np
8
- import torch
9
-
10
- from gradio_examples import EXAMPLES
11
- from pipeline import build_audiosep
12
-
13
- CHECKPOINTS_DIR = Path("checkpoint")
14
-
15
- DEVICE = torch.device("cuda" if torch.cuda.is_available() else "cpu")
16
-
17
- # The model will be loaded in the future
18
- MODEL_NAME = CHECKPOINTS_DIR / "audiosep_base_4M_steps.ckpt"
19
- MODEL = build_audiosep(
20
- config_yaml="config/audiosep_base.yaml",
21
- checkpoint_path=MODEL_NAME,
22
- device=DEVICE,
23
- )
24
-
25
-
26
- description = """
27
- # AudioSep: Separate Anything You Describe
28
- [[Project Page]](https://audio-agi.github.io/Separate-Anything-You-Describe) [[Paper]](https://audio-agi.github.io/Separate-Anything-You-Describe/AudioSep_arXiv.pdf) [[Code]](https://github.com/Audio-AGI/AudioSep)
29
-
30
- AudioSep is a foundation model for open-domain sound separation with natural language queries.
31
- AudioSep demonstrates strong separation performance and impressivezero-shot generalization ability on
32
- numerous tasks such as audio event separation, musical instrument separation, and speech enhancement.
33
- """
34
-
35
-
36
- def inference(audio_file_path: str, text: str):
37
- print(f"Separate audio from [{audio_file_path}] with textual query [{text}]")
38
- mixture, _ = librosa.load(audio_file_path, sr=32000, mono=True)
39
-
40
- with torch.no_grad():
41
- text = [text]
42
-
43
- conditions = MODEL.query_encoder.get_query_embed(
44
- modality="text", text=text, device=DEVICE
45
- )
46
-
47
- input_dict = {
48
- "mixture": torch.Tensor(mixture)[None, None, :].to(DEVICE),
49
- "condition": conditions,
50
- }
51
-
52
- sep_segment = MODEL.ss_model(input_dict)["waveform"]
53
-
54
- sep_segment = sep_segment.squeeze(0).squeeze(0).data.cpu().numpy()
55
-
56
- return 32000, np.round(sep_segment * 32767).astype(np.int16)
57
-
58
-
59
- with gr.Blocks(title="AudioSep") as demo:
60
- gr.Markdown(description)
61
- with gr.Row():
62
- with gr.Column():
63
- input_audio = gr.Audio(label="Mixture", type="filepath")
64
- text = gr.Textbox(label="Text Query")
65
- with gr.Column():
66
- with gr.Column():
67
- output_audio = gr.Audio(label="Separation Result", scale=10)
68
- button = gr.Button(
69
- "Separate",
70
- variant="primary",
71
- scale=2,
72
- size="lg",
73
- interactive=True,
74
- )
75
- button.click(
76
- fn=inference, inputs=[input_audio, text], outputs=[output_audio]
77
- )
78
-
79
- gr.Markdown("## Examples")
80
- gr.Examples(examples=EXAMPLES, inputs=[input_audio, text])
81
-
82
- demo.queue().launch(share=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Awiny/Image2Paragraph/models/grit_src/third_party/CenterNet2/detectron2/utils/env.py DELETED
@@ -1,170 +0,0 @@
1
- # Copyright (c) Facebook, Inc. and its affiliates.
2
- import importlib
3
- import importlib.util
4
- import logging
5
- import numpy as np
6
- import os
7
- import random
8
- import sys
9
- from datetime import datetime
10
- import torch
11
-
12
- __all__ = ["seed_all_rng"]
13
-
14
-
15
- TORCH_VERSION = tuple(int(x) for x in torch.__version__.split(".")[:2])
16
- """
17
- PyTorch version as a tuple of 2 ints. Useful for comparison.
18
- """
19
-
20
-
21
- DOC_BUILDING = os.getenv("_DOC_BUILDING", False) # set in docs/conf.py
22
- """
23
- Whether we're building documentation.
24
- """
25
-
26
-
27
- def seed_all_rng(seed=None):
28
- """
29
- Set the random seed for the RNG in torch, numpy and python.
30
-
31
- Args:
32
- seed (int): if None, will use a strong random seed.
33
- """
34
- if seed is None:
35
- seed = (
36
- os.getpid()
37
- + int(datetime.now().strftime("%S%f"))
38
- + int.from_bytes(os.urandom(2), "big")
39
- )
40
- logger = logging.getLogger(__name__)
41
- logger.info("Using a generated random seed {}".format(seed))
42
- np.random.seed(seed)
43
- torch.manual_seed(seed)
44
- random.seed(seed)
45
- os.environ["PYTHONHASHSEED"] = str(seed)
46
-
47
-
48
- # from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path
49
- def _import_file(module_name, file_path, make_importable=False):
50
- spec = importlib.util.spec_from_file_location(module_name, file_path)
51
- module = importlib.util.module_from_spec(spec)
52
- spec.loader.exec_module(module)
53
- if make_importable:
54
- sys.modules[module_name] = module
55
- return module
56
-
57
-
58
- def _configure_libraries():
59
- """
60
- Configurations for some libraries.
61
- """
62
- # An environment option to disable `import cv2` globally,
63
- # in case it leads to negative performance impact
64
- disable_cv2 = int(os.environ.get("DETECTRON2_DISABLE_CV2", False))
65
- if disable_cv2:
66
- sys.modules["cv2"] = None
67
- else:
68
- # Disable opencl in opencv since its interaction with cuda often has negative effects
69
- # This envvar is supported after OpenCV 3.4.0
70
- os.environ["OPENCV_OPENCL_RUNTIME"] = "disabled"
71
- try:
72
- import cv2
73
-
74
- if int(cv2.__version__.split(".")[0]) >= 3:
75
- cv2.ocl.setUseOpenCL(False)
76
- except ModuleNotFoundError:
77
- # Other types of ImportError, if happened, should not be ignored.
78
- # Because a failed opencv import could mess up address space
79
- # https://github.com/skvark/opencv-python/issues/381
80
- pass
81
-
82
- def get_version(module, digit=2):
83
- return tuple(map(int, module.__version__.split(".")[:digit]))
84
-
85
- # fmt: off
86
- assert get_version(torch) >= (1, 4), "Requires torch>=1.4"
87
- import fvcore
88
- assert get_version(fvcore, 3) >= (0, 1, 2), "Requires fvcore>=0.1.2"
89
- import yaml
90
- assert get_version(yaml) >= (5, 1), "Requires pyyaml>=5.1"
91
- # fmt: on
92
-
93
-
94
- _ENV_SETUP_DONE = False
95
-
96
-
97
- def setup_environment():
98
- """Perform environment setup work. The default setup is a no-op, but this
99
- function allows the user to specify a Python source file or a module in
100
- the $DETECTRON2_ENV_MODULE environment variable, that performs
101
- custom setup work that may be necessary to their computing environment.
102
- """
103
- global _ENV_SETUP_DONE
104
- if _ENV_SETUP_DONE:
105
- return
106
- _ENV_SETUP_DONE = True
107
-
108
- _configure_libraries()
109
-
110
- custom_module_path = os.environ.get("DETECTRON2_ENV_MODULE")
111
-
112
- if custom_module_path:
113
- setup_custom_environment(custom_module_path)
114
- else:
115
- # The default setup is a no-op
116
- pass
117
-
118
-
119
- def setup_custom_environment(custom_module):
120
- """
121
- Load custom environment setup by importing a Python source file or a
122
- module, and run the setup function.
123
- """
124
- if custom_module.endswith(".py"):
125
- module = _import_file("detectron2.utils.env.custom_module", custom_module)
126
- else:
127
- module = importlib.import_module(custom_module)
128
- assert hasattr(module, "setup_environment") and callable(module.setup_environment), (
129
- "Custom environment module defined in {} does not have the "
130
- "required callable attribute 'setup_environment'."
131
- ).format(custom_module)
132
- module.setup_environment()
133
-
134
-
135
- def fixup_module_metadata(module_name, namespace, keys=None):
136
- """
137
- Fix the __qualname__ of module members to be their exported api name, so
138
- when they are referenced in docs, sphinx can find them. Reference:
139
- https://github.com/python-trio/trio/blob/6754c74eacfad9cc5c92d5c24727a2f3b620624e/trio/_util.py#L216-L241
140
- """
141
- if not DOC_BUILDING:
142
- return
143
- seen_ids = set()
144
-
145
- def fix_one(qualname, name, obj):
146
- # avoid infinite recursion (relevant when using
147
- # typing.Generic, for example)
148
- if id(obj) in seen_ids:
149
- return
150
- seen_ids.add(id(obj))
151
-
152
- mod = getattr(obj, "__module__", None)
153
- if mod is not None and (mod.startswith(module_name) or mod.startswith("fvcore.")):
154
- obj.__module__ = module_name
155
- # Modules, unlike everything else in Python, put fully-qualitied
156
- # names into their __name__ attribute. We check for "." to avoid
157
- # rewriting these.
158
- if hasattr(obj, "__name__") and "." not in obj.__name__:
159
- obj.__name__ = name
160
- obj.__qualname__ = qualname
161
- if isinstance(obj, type):
162
- for attr_name, attr_value in obj.__dict__.items():
163
- fix_one(objname + "." + attr_name, attr_name, attr_value)
164
-
165
- if keys is None:
166
- keys = namespace.keys()
167
- for objname in keys:
168
- if not objname.startswith("_"):
169
- obj = namespace[objname]
170
- fix_one(objname, objname, obj)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/BAAI/dreambooth-altdiffusion/model.README.md DELETED
@@ -1,24 +0,0 @@
1
- ---
2
- license: creativeml-openrail-m
3
- tags:
4
- - text-to-image
5
- widget:
6
- - text: sks
7
- ---
8
- ### test-m9 Dreambooth model trained by Alon77777 with [Hugging Face Dreambooth Training Space](https://huggingface.co/spaces/multimodalart/dreambooth-training) with the alt_m9 base model
9
-
10
- You run your new concept via `diffusers` [Colab Notebook for Inference](https://colab.research.google.com/github/huggingface/notebooks/blob/main/diffusers/sd_dreambooth_inference.ipynb). Don't forget to use the concept prompts!
11
-
12
- Sample pictures of:
13
-
14
-
15
-
16
-
17
-
18
-
19
-
20
-
21
-
22
-
23
- sks (use that on your prompt)
24
- ![sks 0](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%2810%29.jpg)![sks 1](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%285%29.jpg)![sks 2](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%282%29.jpg)![sks 3](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%283%29.jpg)![sks 4](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%2811%29.jpg)![sks 5](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%288%29.jpg)![sks 6](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%281%29.jpg)![sks 7](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%289%29.jpg)![sks 8](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%286%29.jpg)![sks 9](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%284%29.jpg)![sks 10](https://huggingface.co/Alon77777/test-m9/resolve/main/concept_images/sks_%287%29.jpg)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/botocore/docs/docstring.py DELETED
@@ -1,97 +0,0 @@
1
- # Copyright 2015 Amazon.com, Inc. or its affiliates. All Rights Reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License"). You
4
- # may not use this file except in compliance with the License. A copy of
5
- # the License is located at
6
- #
7
- # http://aws.amazon.com/apache2.0/
8
- #
9
- # or in the "license" file accompanying this file. This file is
10
- # distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
11
- # ANY KIND, either express or implied. See the License for the specific
12
- # language governing permissions and limitations under the License.
13
- from botocore.docs.bcdoc.restdoc import DocumentStructure
14
- from botocore.docs.method import document_model_driven_method
15
- from botocore.docs.paginator import document_paginate_method
16
- from botocore.docs.waiter import document_wait_method
17
-
18
-
19
- class LazyLoadedDocstring(str):
20
- """Used for lazily loading docstrings
21
-
22
- You can instantiate this class and assign it to a __doc__ value.
23
- The docstring will not be generated till accessed via __doc__ or
24
- help(). Note that all docstring classes **must** subclass from
25
- this class. It cannot be used directly as a docstring.
26
- """
27
-
28
- def __init__(self, *args, **kwargs):
29
- """
30
- The args and kwargs are the same as the underlying document
31
- generation function. These just get proxied to the underlying
32
- function.
33
- """
34
- super().__init__()
35
- self._gen_args = args
36
- self._gen_kwargs = kwargs
37
- self._docstring = None
38
-
39
- def __new__(cls, *args, **kwargs):
40
- # Needed in order to sub class from str with args and kwargs
41
- return super().__new__(cls)
42
-
43
- def _write_docstring(self, *args, **kwargs):
44
- raise NotImplementedError(
45
- '_write_docstring is not implemented. Please subclass from '
46
- 'this class and provide your own _write_docstring method'
47
- )
48
-
49
- def expandtabs(self, tabsize=8):
50
- """Expands tabs to spaces
51
-
52
- So this is a big hack in order to get lazy loaded docstring work
53
- for the ``help()``. In the ``help()`` function, ``pydoc`` and
54
- ``inspect`` are used. At some point the ``inspect.cleandoc``
55
- method is called. To clean the docs ``expandtabs`` is called
56
- and that is where we override the method to generate and return the
57
- docstrings.
58
- """
59
- if self._docstring is None:
60
- self._generate()
61
- return self._docstring.expandtabs(tabsize)
62
-
63
- def __str__(self):
64
- return self._generate()
65
-
66
- # __doc__ of target will use either __repr__ or __str__ of this class.
67
- __repr__ = __str__
68
-
69
- def _generate(self):
70
- # Generate the docstring if it is not already cached.
71
- if self._docstring is None:
72
- self._docstring = self._create_docstring()
73
- return self._docstring
74
-
75
- def _create_docstring(self):
76
- docstring_structure = DocumentStructure('docstring', target='html')
77
- # Call the document method function with the args and kwargs
78
- # passed to the class.
79
- self._write_docstring(
80
- docstring_structure, *self._gen_args, **self._gen_kwargs
81
- )
82
- return docstring_structure.flush_structure().decode('utf-8')
83
-
84
-
85
- class ClientMethodDocstring(LazyLoadedDocstring):
86
- def _write_docstring(self, *args, **kwargs):
87
- document_model_driven_method(*args, **kwargs)
88
-
89
-
90
- class WaiterDocstring(LazyLoadedDocstring):
91
- def _write_docstring(self, *args, **kwargs):
92
- document_wait_method(*args, **kwargs)
93
-
94
-
95
- class PaginatorDocstring(LazyLoadedDocstring):
96
- def _write_docstring(self, *args, **kwargs):
97
- document_paginate_method(*args, **kwargs)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/pygments/lexers/_mapping.py DELETED
@@ -1,553 +0,0 @@
1
- # Automatically generated by scripts/gen_mapfiles.py.
2
- # DO NOT EDIT BY HAND; run `make mapfiles` instead.
3
-
4
- LEXERS = {
5
- 'ABAPLexer': ('pip._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)),
6
- 'AMDGPULexer': ('pip._vendor.pygments.lexers.amdgpu', 'AMDGPU', ('amdgpu',), ('*.isa',), ()),
7
- 'APLLexer': ('pip._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl', '*.aplf', '*.aplo', '*.apln', '*.aplc', '*.apli', '*.dyalog'), ()),
8
- 'AbnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)),
9
- 'ActionScript3Lexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('actionscript3', 'as3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')),
10
- 'ActionScriptLexer': ('pip._vendor.pygments.lexers.actionscript', 'ActionScript', ('actionscript', 'as'), ('*.as',), ('application/x-actionscript', 'text/x-actionscript', 'text/actionscript')),
11
- 'AdaLexer': ('pip._vendor.pygments.lexers.ada', 'Ada', ('ada', 'ada95', 'ada2005'), ('*.adb', '*.ads', '*.ada'), ('text/x-ada',)),
12
- 'AdlLexer': ('pip._vendor.pygments.lexers.archetype', 'ADL', ('adl',), ('*.adl', '*.adls', '*.adlf', '*.adlx'), ()),
13
- 'AgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Agda', ('agda',), ('*.agda',), ('text/x-agda',)),
14
- 'AheuiLexer': ('pip._vendor.pygments.lexers.esoteric', 'Aheui', ('aheui',), ('*.aheui',), ()),
15
- 'AlloyLexer': ('pip._vendor.pygments.lexers.dsls', 'Alloy', ('alloy',), ('*.als',), ('text/x-alloy',)),
16
- 'AmbientTalkLexer': ('pip._vendor.pygments.lexers.ambient', 'AmbientTalk', ('ambienttalk', 'ambienttalk/2', 'at'), ('*.at',), ('text/x-ambienttalk',)),
17
- 'AmplLexer': ('pip._vendor.pygments.lexers.ampl', 'Ampl', ('ampl',), ('*.run',), ()),
18
- 'Angular2HtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML + Angular2', ('html+ng2',), ('*.ng2',), ()),
19
- 'Angular2Lexer': ('pip._vendor.pygments.lexers.templates', 'Angular2', ('ng2',), (), ()),
20
- 'AntlrActionScriptLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ActionScript Target', ('antlr-actionscript', 'antlr-as'), ('*.G', '*.g'), ()),
21
- 'AntlrCSharpLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()),
22
- 'AntlrCppLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()),
23
- 'AntlrJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()),
24
- 'AntlrLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()),
25
- 'AntlrObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()),
26
- 'AntlrPerlLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()),
27
- 'AntlrPythonLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()),
28
- 'AntlrRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'ANTLR With Ruby Target', ('antlr-ruby', 'antlr-rb'), ('*.G', '*.g'), ()),
29
- 'ApacheConfLexer': ('pip._vendor.pygments.lexers.configs', 'ApacheConf', ('apacheconf', 'aconf', 'apache'), ('.htaccess', 'apache.conf', 'apache2.conf'), ('text/x-apacheconf',)),
30
- 'AppleScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()),
31
- 'ArduinoLexer': ('pip._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)),
32
- 'ArrowLexer': ('pip._vendor.pygments.lexers.arrow', 'Arrow', ('arrow',), ('*.arw',), ()),
33
- 'ArturoLexer': ('pip._vendor.pygments.lexers.arturo', 'Arturo', ('arturo', 'art'), ('*.art',), ()),
34
- 'AscLexer': ('pip._vendor.pygments.lexers.asc', 'ASCII armored', ('asc', 'pem'), ('*.asc', '*.pem', 'id_dsa', 'id_ecdsa', 'id_ecdsa_sk', 'id_ed25519', 'id_ed25519_sk', 'id_rsa'), ('application/pgp-keys', 'application/pgp-encrypted', 'application/pgp-signature')),
35
- 'AspectJLexer': ('pip._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)),
36
- 'AsymptoteLexer': ('pip._vendor.pygments.lexers.graphics', 'Asymptote', ('asymptote', 'asy'), ('*.asy',), ('text/x-asymptote',)),
37
- 'AugeasLexer': ('pip._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()),
38
- 'AutoItLexer': ('pip._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)),
39
- 'AutohotkeyLexer': ('pip._vendor.pygments.lexers.automation', 'autohotkey', ('autohotkey', 'ahk'), ('*.ahk', '*.ahkl'), ('text/x-autohotkey',)),
40
- 'AwkLexer': ('pip._vendor.pygments.lexers.textedit', 'Awk', ('awk', 'gawk', 'mawk', 'nawk'), ('*.awk',), ('application/x-awk',)),
41
- 'BBCBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BBC Basic', ('bbcbasic',), ('*.bbc',), ()),
42
- 'BBCodeLexer': ('pip._vendor.pygments.lexers.markup', 'BBCode', ('bbcode',), (), ('text/x-bbcode',)),
43
- 'BCLexer': ('pip._vendor.pygments.lexers.algebra', 'BC', ('bc',), ('*.bc',), ()),
44
- 'BSTLexer': ('pip._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()),
45
- 'BareLexer': ('pip._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()),
46
- 'BaseMakefileLexer': ('pip._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()),
47
- 'BashLexer': ('pip._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', '.kshrc', 'kshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 'text/x-shellscript')),
48
- 'BashSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')),
49
- 'BatchLexer': ('pip._vendor.pygments.lexers.shell', 'Batchfile', ('batch', 'bat', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)),
50
- 'BddLexer': ('pip._vendor.pygments.lexers.bdd', 'Bdd', ('bdd',), ('*.feature',), ('text/x-bdd',)),
51
- 'BefungeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)),
52
- 'BerryLexer': ('pip._vendor.pygments.lexers.berry', 'Berry', ('berry', 'be'), ('*.be',), ('text/x-berry', 'application/x-berry')),
53
- 'BibTeXLexer': ('pip._vendor.pygments.lexers.bibtex', 'BibTeX', ('bibtex', 'bib'), ('*.bib',), ('text/x-bibtex',)),
54
- 'BlitzBasicLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)),
55
- 'BlitzMaxLexer': ('pip._vendor.pygments.lexers.basic', 'BlitzMax', ('blitzmax', 'bmax'), ('*.bmx',), ('text/x-bmx',)),
56
- 'BnfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'BNF', ('bnf',), ('*.bnf',), ('text/x-bnf',)),
57
- 'BoaLexer': ('pip._vendor.pygments.lexers.boa', 'Boa', ('boa',), ('*.boa',), ()),
58
- 'BooLexer': ('pip._vendor.pygments.lexers.dotnet', 'Boo', ('boo',), ('*.boo',), ('text/x-boo',)),
59
- 'BoogieLexer': ('pip._vendor.pygments.lexers.verification', 'Boogie', ('boogie',), ('*.bpl',), ()),
60
- 'BrainfuckLexer': ('pip._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)),
61
- 'BugsLexer': ('pip._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()),
62
- 'CAmkESLexer': ('pip._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()),
63
- 'CLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc', '*.x[bp]m'), ('text/x-chdr', 'text/x-csrc', 'image/x-xbitmap', 'image/x-xpixmap')),
64
- 'CMakeLexer': ('pip._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)),
65
- 'CObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)),
66
- 'CPSALexer': ('pip._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()),
67
- 'CSSUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'CSS+UL4', ('css+ul4',), ('*.cssul4',), ()),
68
- 'CSharpAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
69
- 'CSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#', 'cs'), ('*.cs',), ('text/x-csharp',)),
70
- 'Ca65Lexer': ('pip._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()),
71
- 'CadlLexer': ('pip._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()),
72
- 'CapDLLexer': ('pip._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()),
73
- 'CapnProtoLexer': ('pip._vendor.pygments.lexers.capnproto', "Cap'n Proto", ('capnp',), ('*.capnp',), ()),
74
- 'CbmBasicV2Lexer': ('pip._vendor.pygments.lexers.basic', 'CBM BASIC V2', ('cbmbas',), ('*.bas',), ()),
75
- 'CddlLexer': ('pip._vendor.pygments.lexers.cddl', 'CDDL', ('cddl',), ('*.cddl',), ('text/x-cddl',)),
76
- 'CeylonLexer': ('pip._vendor.pygments.lexers.jvm', 'Ceylon', ('ceylon',), ('*.ceylon',), ('text/x-ceylon',)),
77
- 'Cfengine3Lexer': ('pip._vendor.pygments.lexers.configs', 'CFEngine3', ('cfengine3', 'cf3'), ('*.cf',), ()),
78
- 'ChaiscriptLexer': ('pip._vendor.pygments.lexers.scripting', 'ChaiScript', ('chaiscript', 'chai'), ('*.chai',), ('text/x-chaiscript', 'application/x-chaiscript')),
79
- 'ChapelLexer': ('pip._vendor.pygments.lexers.chapel', 'Chapel', ('chapel', 'chpl'), ('*.chpl',), ()),
80
- 'CharmciLexer': ('pip._vendor.pygments.lexers.c_like', 'Charmci', ('charmci',), ('*.ci',), ()),
81
- 'CheetahHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Cheetah', ('html+cheetah', 'html+spitfire', 'htmlcheetah'), (), ('text/html+cheetah', 'text/html+spitfire')),
82
- 'CheetahJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Cheetah', ('javascript+cheetah', 'js+cheetah', 'javascript+spitfire', 'js+spitfire'), (), ('application/x-javascript+cheetah', 'text/x-javascript+cheetah', 'text/javascript+cheetah', 'application/x-javascript+spitfire', 'text/x-javascript+spitfire', 'text/javascript+spitfire')),
83
- 'CheetahLexer': ('pip._vendor.pygments.lexers.templates', 'Cheetah', ('cheetah', 'spitfire'), ('*.tmpl', '*.spt'), ('application/x-cheetah', 'application/x-spitfire')),
84
- 'CheetahXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Cheetah', ('xml+cheetah', 'xml+spitfire'), (), ('application/xml+cheetah', 'application/xml+spitfire')),
85
- 'CirruLexer': ('pip._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), ('text/x-cirru',)),
86
- 'ClayLexer': ('pip._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)),
87
- 'CleanLexer': ('pip._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()),
88
- 'ClojureLexer': ('pip._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj', '*.cljc'), ('text/x-clojure', 'application/x-clojure')),
89
- 'ClojureScriptLexer': ('pip._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')),
90
- 'CobolFreeformatLexer': ('pip._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()),
91
- 'CobolLexer': ('pip._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)),
92
- 'CoffeeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffeescript', 'coffee-script', 'coffee'), ('*.coffee',), ('text/coffeescript',)),
93
- 'ColdfusionCFCLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()),
94
- 'ColdfusionHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)),
95
- 'ColdfusionLexer': ('pip._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()),
96
- 'Comal80Lexer': ('pip._vendor.pygments.lexers.comal', 'COMAL-80', ('comal', 'comal80'), ('*.cml', '*.comal'), ()),
97
- 'CommonLispLexer': ('pip._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)),
98
- 'ComponentPascalLexer': ('pip._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)),
99
- 'CoqLexer': ('pip._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)),
100
- 'CplintLexer': ('pip._vendor.pygments.lexers.cplint', 'cplint', ('cplint',), ('*.ecl', '*.prolog', '*.pro', '*.pl', '*.P', '*.lpad', '*.cpl'), ('text/x-cplint',)),
101
- 'CppLexer': ('pip._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP', '*.tpp'), ('text/x-c++hdr', 'text/x-c++src')),
102
- 'CppObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)),
103
- 'CrmshLexer': ('pip._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()),
104
- 'CrocLexer': ('pip._vendor.pygments.lexers.d', 'Croc', ('croc',), ('*.croc',), ('text/x-crocsrc',)),
105
- 'CryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Cryptol', ('cryptol', 'cry'), ('*.cry',), ('text/x-cryptol',)),
106
- 'CrystalLexer': ('pip._vendor.pygments.lexers.crystal', 'Crystal', ('cr', 'crystal'), ('*.cr',), ('text/x-crystal',)),
107
- 'CsoundDocumentLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Document', ('csound-document', 'csound-csd'), ('*.csd',), ()),
108
- 'CsoundOrchestraLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Orchestra', ('csound', 'csound-orc'), ('*.orc', '*.udo'), ()),
109
- 'CsoundScoreLexer': ('pip._vendor.pygments.lexers.csound', 'Csound Score', ('csound-score', 'csound-sco'), ('*.sco',), ()),
110
- 'CssDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), ('*.css.j2', '*.css.jinja2'), ('text/css+django', 'text/css+jinja')),
111
- 'CssErbLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+ruby', 'css+erb'), (), ('text/css+ruby',)),
112
- 'CssGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)),
113
- 'CssLexer': ('pip._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)),
114
- 'CssPhpLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)),
115
- 'CssSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)),
116
- 'CudaLexer': ('pip._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)),
117
- 'CypherLexer': ('pip._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()),
118
- 'CythonLexer': ('pip._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')),
119
- 'DLexer': ('pip._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)),
120
- 'DObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)),
121
- 'DarcsPatchLexer': ('pip._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()),
122
- 'DartLexer': ('pip._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)),
123
- 'Dasm16Lexer': ('pip._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)),
124
- 'DebianControlLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Control file', ('debcontrol', 'control'), ('control',), ()),
125
- 'DelphiLexer': ('pip._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)),
126
- 'DevicetreeLexer': ('pip._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)),
127
- 'DgLexer': ('pip._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)),
128
- 'DiffLexer': ('pip._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')),
129
- 'DjangoLexer': ('pip._vendor.pygments.lexers.templates', 'Django/Jinja', ('django', 'jinja'), (), ('application/x-django-templating', 'application/x-jinja')),
130
- 'DockerLexer': ('pip._vendor.pygments.lexers.configs', 'Docker', ('docker', 'dockerfile'), ('Dockerfile', '*.docker'), ('text/x-dockerfile-config',)),
131
- 'DtdLexer': ('pip._vendor.pygments.lexers.html', 'DTD', ('dtd',), ('*.dtd',), ('application/xml-dtd',)),
132
- 'DuelLexer': ('pip._vendor.pygments.lexers.webmisc', 'Duel', ('duel', 'jbst', 'jsonml+bst'), ('*.duel', '*.jbst'), ('text/x-duel', 'text/x-jbst')),
133
- 'DylanConsoleLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan session', ('dylan-console', 'dylan-repl'), ('*.dylan-console',), ('text/x-dylan-console',)),
134
- 'DylanLexer': ('pip._vendor.pygments.lexers.dylan', 'Dylan', ('dylan',), ('*.dylan', '*.dyl', '*.intr'), ('text/x-dylan',)),
135
- 'DylanLidLexer': ('pip._vendor.pygments.lexers.dylan', 'DylanLID', ('dylan-lid', 'lid'), ('*.lid', '*.hdp'), ('text/x-dylan-lid',)),
136
- 'ECLLexer': ('pip._vendor.pygments.lexers.ecl', 'ECL', ('ecl',), ('*.ecl',), ('application/x-ecl',)),
137
- 'ECLexer': ('pip._vendor.pygments.lexers.c_like', 'eC', ('ec',), ('*.ec', '*.eh'), ('text/x-echdr', 'text/x-ecsrc')),
138
- 'EarlGreyLexer': ('pip._vendor.pygments.lexers.javascript', 'Earl Grey', ('earl-grey', 'earlgrey', 'eg'), ('*.eg',), ('text/x-earl-grey',)),
139
- 'EasytrieveLexer': ('pip._vendor.pygments.lexers.scripting', 'Easytrieve', ('easytrieve',), ('*.ezt', '*.mac'), ('text/x-easytrieve',)),
140
- 'EbnfLexer': ('pip._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)),
141
- 'EiffelLexer': ('pip._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)),
142
- 'ElixirConsoleLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)),
143
- 'ElixirLexer': ('pip._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs', '*.leex'), ('text/x-elixir',)),
144
- 'ElmLexer': ('pip._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)),
145
- 'ElpiLexer': ('pip._vendor.pygments.lexers.elpi', 'Elpi', ('elpi',), ('*.elpi',), ('text/x-elpi',)),
146
- 'EmacsLispLexer': ('pip._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs-lisp', 'elisp', 'emacs'), ('*.el',), ('text/x-elisp', 'application/x-elisp')),
147
- 'EmailLexer': ('pip._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)),
148
- 'ErbLexer': ('pip._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)),
149
- 'ErlangLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)),
150
- 'ErlangShellLexer': ('pip._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)),
151
- 'EvoqueHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)),
152
- 'EvoqueLexer': ('pip._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)),
153
- 'EvoqueXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)),
154
- 'ExeclineLexer': ('pip._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()),
155
- 'EzhilLexer': ('pip._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)),
156
- 'FSharpLexer': ('pip._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi', '*.fsx'), ('text/x-fsharp',)),
157
- 'FStarLexer': ('pip._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)),
158
- 'FactorLexer': ('pip._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)),
159
- 'FancyLexer': ('pip._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)),
160
- 'FantomLexer': ('pip._vendor.pygments.lexers.fantom', 'Fantom', ('fan',), ('*.fan',), ('application/x-fantom',)),
161
- 'FelixLexer': ('pip._vendor.pygments.lexers.felix', 'Felix', ('felix', 'flx'), ('*.flx', '*.flxh'), ('text/x-felix',)),
162
- 'FennelLexer': ('pip._vendor.pygments.lexers.lisp', 'Fennel', ('fennel', 'fnl'), ('*.fnl',), ()),
163
- 'FiftLexer': ('pip._vendor.pygments.lexers.fift', 'Fift', ('fift', 'fif'), ('*.fif',), ()),
164
- 'FishShellLexer': ('pip._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)),
165
- 'FlatlineLexer': ('pip._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)),
166
- 'FloScriptLexer': ('pip._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()),
167
- 'ForthLexer': ('pip._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)),
168
- 'FortranFixedLexer': ('pip._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()),
169
- 'FortranLexer': ('pip._vendor.pygments.lexers.fortran', 'Fortran', ('fortran', 'f90'), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)),
170
- 'FoxProLexer': ('pip._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()),
171
- 'FreeFemLexer': ('pip._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)),
172
- 'FuncLexer': ('pip._vendor.pygments.lexers.func', 'FunC', ('func', 'fc'), ('*.fc', '*.func'), ()),
173
- 'FutharkLexer': ('pip._vendor.pygments.lexers.futhark', 'Futhark', ('futhark',), ('*.fut',), ('text/x-futhark',)),
174
- 'GAPConsoleLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP session', ('gap-console', 'gap-repl'), ('*.tst',), ()),
175
- 'GAPLexer': ('pip._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()),
176
- 'GDScriptLexer': ('pip._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')),
177
- 'GLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)),
178
- 'GSQLLexer': ('pip._vendor.pygments.lexers.gsql', 'GSQL', ('gsql',), ('*.gsql',), ()),
179
- 'GasLexer': ('pip._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)),
180
- 'GcodeLexer': ('pip._vendor.pygments.lexers.gcodelexer', 'g-code', ('gcode',), ('*.gcode',), ()),
181
- 'GenshiLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')),
182
- 'GenshiTextLexer': ('pip._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')),
183
- 'GettextLexer': ('pip._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')),
184
- 'GherkinLexer': ('pip._vendor.pygments.lexers.testing', 'Gherkin', ('gherkin', 'cucumber'), ('*.feature',), ('text/x-gherkin',)),
185
- 'GnuplotLexer': ('pip._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)),
186
- 'GoLexer': ('pip._vendor.pygments.lexers.go', 'Go', ('go', 'golang'), ('*.go',), ('text/x-gosrc',)),
187
- 'GoloLexer': ('pip._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()),
188
- 'GoodDataCLLexer': ('pip._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)),
189
- 'GosuLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)),
190
- 'GosuTemplateLexer': ('pip._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)),
191
- 'GraphvizLexer': ('pip._vendor.pygments.lexers.graphviz', 'Graphviz', ('graphviz', 'dot'), ('*.gv', '*.dot'), ('text/x-graphviz', 'text/vnd.graphviz')),
192
- 'GroffLexer': ('pip._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1-9]', '*.man', '*.1p', '*.3pm'), ('application/x-troff', 'text/troff')),
193
- 'GroovyLexer': ('pip._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)),
194
- 'HLSLShaderLexer': ('pip._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)),
195
- 'HTMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'HTML+UL4', ('html+ul4',), ('*.htmlul4',), ()),
196
- 'HamlLexer': ('pip._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)),
197
- 'HandlebarsHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')),
198
- 'HandlebarsLexer': ('pip._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()),
199
- 'HaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)),
200
- 'HaxeLexer': ('pip._vendor.pygments.lexers.haxe', 'Haxe', ('haxe', 'hxsl', 'hx'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')),
201
- 'HexdumpLexer': ('pip._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()),
202
- 'HsailLexer': ('pip._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)),
203
- 'HspecLexer': ('pip._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), ('*Spec.hs',), ()),
204
- 'HtmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), ('*.html.j2', '*.htm.j2', '*.xhtml.j2', '*.html.jinja2', '*.htm.jinja2', '*.xhtml.jinja2'), ('text/html+django', 'text/html+jinja')),
205
- 'HtmlGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)),
206
- 'HtmlLexer': ('pip._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')),
207
- 'HtmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')),
208
- 'HtmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)),
209
- 'HttpLexer': ('pip._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()),
210
- 'HxmlLexer': ('pip._vendor.pygments.lexers.haxe', 'Hxml', ('haxeml', 'hxml'), ('*.hxml',), ()),
211
- 'HyLexer': ('pip._vendor.pygments.lexers.lisp', 'Hy', ('hylang',), ('*.hy',), ('text/x-hy', 'application/x-hy')),
212
- 'HybrisLexer': ('pip._vendor.pygments.lexers.scripting', 'Hybris', ('hybris', 'hy'), ('*.hy', '*.hyb'), ('text/x-hybris', 'application/x-hybris')),
213
- 'IDLLexer': ('pip._vendor.pygments.lexers.idl', 'IDL', ('idl',), ('*.pro',), ('text/idl',)),
214
- 'IconLexer': ('pip._vendor.pygments.lexers.unicon', 'Icon', ('icon',), ('*.icon', '*.ICON'), ()),
215
- 'IdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Idris', ('idris', 'idr'), ('*.idr',), ('text/x-idris',)),
216
- 'IgorLexer': ('pip._vendor.pygments.lexers.igor', 'Igor', ('igor', 'igorpro'), ('*.ipf',), ('text/ipf',)),
217
- 'Inform6Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6', ('inform6', 'i6'), ('*.inf',), ()),
218
- 'Inform6TemplateLexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 6 template', ('i6t',), ('*.i6t',), ()),
219
- 'Inform7Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'Inform 7', ('inform7', 'i7'), ('*.ni', '*.i7x'), ()),
220
- 'IniLexer': ('pip._vendor.pygments.lexers.configs', 'INI', ('ini', 'cfg', 'dosini'), ('*.ini', '*.cfg', '*.inf', '.editorconfig', '*.service', '*.socket', '*.device', '*.mount', '*.automount', '*.swap', '*.target', '*.path', '*.timer', '*.slice', '*.scope'), ('text/x-ini', 'text/inf')),
221
- 'IoLexer': ('pip._vendor.pygments.lexers.iolang', 'Io', ('io',), ('*.io',), ('text/x-iosrc',)),
222
- 'IokeLexer': ('pip._vendor.pygments.lexers.jvm', 'Ioke', ('ioke', 'ik'), ('*.ik',), ('text/x-iokesrc',)),
223
- 'IrcLogsLexer': ('pip._vendor.pygments.lexers.textfmts', 'IRC logs', ('irc',), ('*.weechatlog',), ('text/x-irclog',)),
224
- 'IsabelleLexer': ('pip._vendor.pygments.lexers.theorem', 'Isabelle', ('isabelle',), ('*.thy',), ('text/x-isabelle',)),
225
- 'JLexer': ('pip._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)),
226
- 'JMESPathLexer': ('pip._vendor.pygments.lexers.jmespath', 'JMESPath', ('jmespath', 'jp'), ('*.jp',), ()),
227
- 'JSLTLexer': ('pip._vendor.pygments.lexers.jslt', 'JSLT', ('jslt',), ('*.jslt',), ('text/x-jslt',)),
228
- 'JagsLexer': ('pip._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()),
229
- 'JasminLexer': ('pip._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()),
230
- 'JavaLexer': ('pip._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)),
231
- 'JavascriptDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('javascript+django', 'js+django', 'javascript+jinja', 'js+jinja'), ('*.js.j2', '*.js.jinja2'), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')),
232
- 'JavascriptErbLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('javascript+ruby', 'js+ruby', 'javascript+erb', 'js+erb'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')),
233
- 'JavascriptGenshiLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')),
234
- 'JavascriptLexer': ('pip._vendor.pygments.lexers.javascript', 'JavaScript', ('javascript', 'js'), ('*.js', '*.jsm', '*.mjs', '*.cjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')),
235
- 'JavascriptPhpLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('javascript+php', 'js+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')),
236
- 'JavascriptSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('javascript+smarty', 'js+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')),
237
- 'JavascriptUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Javascript+UL4', ('js+ul4',), ('*.jsul4',), ()),
238
- 'JclLexer': ('pip._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)),
239
- 'JsgfLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'JSGF', ('jsgf',), ('*.jsgf',), ('application/jsgf', 'application/x-jsgf', 'text/jsgf')),
240
- 'JsonBareObjectLexer': ('pip._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()),
241
- 'JsonLdLexer': ('pip._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)),
242
- 'JsonLexer': ('pip._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')),
243
- 'JsonnetLexer': ('pip._vendor.pygments.lexers.jsonnet', 'Jsonnet', ('jsonnet',), ('*.jsonnet', '*.libsonnet'), ()),
244
- 'JspLexer': ('pip._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)),
245
- 'JuliaConsoleLexer': ('pip._vendor.pygments.lexers.julia', 'Julia console', ('jlcon', 'julia-repl'), (), ()),
246
- 'JuliaLexer': ('pip._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')),
247
- 'JuttleLexer': ('pip._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')),
248
- 'KLexer': ('pip._vendor.pygments.lexers.q', 'K', ('k',), ('*.k',), ()),
249
- 'KalLexer': ('pip._vendor.pygments.lexers.javascript', 'Kal', ('kal',), ('*.kal',), ('text/kal', 'application/kal')),
250
- 'KconfigLexer': ('pip._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)),
251
- 'KernelLogLexer': ('pip._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()),
252
- 'KokaLexer': ('pip._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)),
253
- 'KotlinLexer': ('pip._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)),
254
- 'KuinLexer': ('pip._vendor.pygments.lexers.kuin', 'Kuin', ('kuin',), ('*.kn',), ()),
255
- 'LSLLexer': ('pip._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)),
256
- 'LassoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)),
257
- 'LassoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 'application/x-httpd-lasso[89]')),
258
- 'LassoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Lasso', ('javascript+lasso', 'js+lasso'), (), ('application/x-javascript+lasso', 'text/x-javascript+lasso', 'text/javascript+lasso')),
259
- 'LassoLexer': ('pip._vendor.pygments.lexers.javascript', 'Lasso', ('lasso', 'lassoscript'), ('*.lasso', '*.lasso[89]'), ('text/x-lasso',)),
260
- 'LassoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Lasso', ('xml+lasso',), (), ('application/xml+lasso',)),
261
- 'LeanLexer': ('pip._vendor.pygments.lexers.theorem', 'Lean', ('lean',), ('*.lean',), ('text/x-lean',)),
262
- 'LessCssLexer': ('pip._vendor.pygments.lexers.css', 'LessCss', ('less',), ('*.less',), ('text/x-less-css',)),
263
- 'LighttpdConfLexer': ('pip._vendor.pygments.lexers.configs', 'Lighttpd configuration file', ('lighttpd', 'lighty'), ('lighttpd.conf',), ('text/x-lighttpd-conf',)),
264
- 'LilyPondLexer': ('pip._vendor.pygments.lexers.lilypond', 'LilyPond', ('lilypond',), ('*.ly',), ()),
265
- 'LimboLexer': ('pip._vendor.pygments.lexers.inferno', 'Limbo', ('limbo',), ('*.b',), ('text/limbo',)),
266
- 'LiquidLexer': ('pip._vendor.pygments.lexers.templates', 'liquid', ('liquid',), ('*.liquid',), ()),
267
- 'LiterateAgdaLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Agda', ('literate-agda', 'lagda'), ('*.lagda',), ('text/x-literate-agda',)),
268
- 'LiterateCryptolLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Cryptol', ('literate-cryptol', 'lcryptol', 'lcry'), ('*.lcry',), ('text/x-literate-cryptol',)),
269
- 'LiterateHaskellLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Haskell', ('literate-haskell', 'lhaskell', 'lhs'), ('*.lhs',), ('text/x-literate-haskell',)),
270
- 'LiterateIdrisLexer': ('pip._vendor.pygments.lexers.haskell', 'Literate Idris', ('literate-idris', 'lidris', 'lidr'), ('*.lidr',), ('text/x-literate-idris',)),
271
- 'LiveScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'LiveScript', ('livescript', 'live-script'), ('*.ls',), ('text/livescript',)),
272
- 'LlvmLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM', ('llvm',), ('*.ll',), ('text/x-llvm',)),
273
- 'LlvmMirBodyLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR Body', ('llvm-mir-body',), (), ()),
274
- 'LlvmMirLexer': ('pip._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()),
275
- 'LogosLexer': ('pip._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)),
276
- 'LogtalkLexer': ('pip._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)),
277
- 'LuaLexer': ('pip._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')),
278
- 'MCFunctionLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCFunction', ('mcfunction', 'mcf'), ('*.mcfunction',), ('text/mcfunction',)),
279
- 'MCSchemaLexer': ('pip._vendor.pygments.lexers.minecraft', 'MCSchema', ('mcschema',), ('*.mcschema',), ('text/mcschema',)),
280
- 'MIMELexer': ('pip._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')),
281
- 'MIPSLexer': ('pip._vendor.pygments.lexers.mips', 'MIPS', ('mips',), ('*.mips', '*.MIPS'), ()),
282
- 'MOOCodeLexer': ('pip._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)),
283
- 'MSDOSSessionLexer': ('pip._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()),
284
- 'Macaulay2Lexer': ('pip._vendor.pygments.lexers.macaulay2', 'Macaulay2', ('macaulay2',), ('*.m2',), ()),
285
- 'MakefileLexer': ('pip._vendor.pygments.lexers.make', 'Makefile', ('make', 'makefile', 'mf', 'bsdmake'), ('*.mak', '*.mk', 'Makefile', 'makefile', 'Makefile.*', 'GNUmakefile'), ('text/x-makefile',)),
286
- 'MakoCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Mako', ('css+mako',), (), ('text/css+mako',)),
287
- 'MakoHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Mako', ('html+mako',), (), ('text/html+mako',)),
288
- 'MakoJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Mako', ('javascript+mako', 'js+mako'), (), ('application/x-javascript+mako', 'text/x-javascript+mako', 'text/javascript+mako')),
289
- 'MakoLexer': ('pip._vendor.pygments.lexers.templates', 'Mako', ('mako',), ('*.mao',), ('application/x-mako',)),
290
- 'MakoXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Mako', ('xml+mako',), (), ('application/xml+mako',)),
291
- 'MaqlLexer': ('pip._vendor.pygments.lexers.business', 'MAQL', ('maql',), ('*.maql',), ('text/x-gooddata-maql', 'application/x-gooddata-maql')),
292
- 'MarkdownLexer': ('pip._vendor.pygments.lexers.markup', 'Markdown', ('markdown', 'md'), ('*.md', '*.markdown'), ('text/x-markdown',)),
293
- 'MaskLexer': ('pip._vendor.pygments.lexers.javascript', 'Mask', ('mask',), ('*.mask',), ('text/x-mask',)),
294
- 'MasonLexer': ('pip._vendor.pygments.lexers.templates', 'Mason', ('mason',), ('*.m', '*.mhtml', '*.mc', '*.mi', 'autohandler', 'dhandler'), ('application/x-mason',)),
295
- 'MathematicaLexer': ('pip._vendor.pygments.lexers.algebra', 'Mathematica', ('mathematica', 'mma', 'nb'), ('*.nb', '*.cdf', '*.nbp', '*.ma'), ('application/mathematica', 'application/vnd.wolfram.mathematica', 'application/vnd.wolfram.mathematica.package', 'application/vnd.wolfram.cdf')),
296
- 'MatlabLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab', ('matlab',), ('*.m',), ('text/matlab',)),
297
- 'MatlabSessionLexer': ('pip._vendor.pygments.lexers.matlab', 'Matlab session', ('matlabsession',), (), ()),
298
- 'MaximaLexer': ('pip._vendor.pygments.lexers.maxima', 'Maxima', ('maxima', 'macsyma'), ('*.mac', '*.max'), ()),
299
- 'MesonLexer': ('pip._vendor.pygments.lexers.meson', 'Meson', ('meson', 'meson.build'), ('meson.build', 'meson_options.txt'), ('text/x-meson',)),
300
- 'MiniDLexer': ('pip._vendor.pygments.lexers.d', 'MiniD', ('minid',), (), ('text/x-minidsrc',)),
301
- 'MiniScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MiniScript', ('miniscript', 'ms'), ('*.ms',), ('text/x-minicript', 'application/x-miniscript')),
302
- 'ModelicaLexer': ('pip._vendor.pygments.lexers.modeling', 'Modelica', ('modelica',), ('*.mo',), ('text/x-modelica',)),
303
- 'Modula2Lexer': ('pip._vendor.pygments.lexers.modula2', 'Modula-2', ('modula2', 'm2'), ('*.def', '*.mod'), ('text/x-modula2',)),
304
- 'MoinWikiLexer': ('pip._vendor.pygments.lexers.markup', 'MoinMoin/Trac Wiki markup', ('trac-wiki', 'moin'), (), ('text/x-trac-wiki',)),
305
- 'MonkeyLexer': ('pip._vendor.pygments.lexers.basic', 'Monkey', ('monkey',), ('*.monkey',), ('text/x-monkey',)),
306
- 'MonteLexer': ('pip._vendor.pygments.lexers.monte', 'Monte', ('monte',), ('*.mt',), ()),
307
- 'MoonScriptLexer': ('pip._vendor.pygments.lexers.scripting', 'MoonScript', ('moonscript', 'moon'), ('*.moon',), ('text/x-moonscript', 'application/x-moonscript')),
308
- 'MoselLexer': ('pip._vendor.pygments.lexers.mosel', 'Mosel', ('mosel',), ('*.mos',), ()),
309
- 'MozPreprocCssLexer': ('pip._vendor.pygments.lexers.markup', 'CSS+mozpreproc', ('css+mozpreproc',), ('*.css.in',), ()),
310
- 'MozPreprocHashLexer': ('pip._vendor.pygments.lexers.markup', 'mozhashpreproc', ('mozhashpreproc',), (), ()),
311
- 'MozPreprocJavascriptLexer': ('pip._vendor.pygments.lexers.markup', 'Javascript+mozpreproc', ('javascript+mozpreproc',), ('*.js.in',), ()),
312
- 'MozPreprocPercentLexer': ('pip._vendor.pygments.lexers.markup', 'mozpercentpreproc', ('mozpercentpreproc',), (), ()),
313
- 'MozPreprocXulLexer': ('pip._vendor.pygments.lexers.markup', 'XUL+mozpreproc', ('xul+mozpreproc',), ('*.xul.in',), ()),
314
- 'MqlLexer': ('pip._vendor.pygments.lexers.c_like', 'MQL', ('mql', 'mq4', 'mq5', 'mql4', 'mql5'), ('*.mq4', '*.mq5', '*.mqh'), ('text/x-mql',)),
315
- 'MscgenLexer': ('pip._vendor.pygments.lexers.dsls', 'Mscgen', ('mscgen', 'msc'), ('*.msc',), ()),
316
- 'MuPADLexer': ('pip._vendor.pygments.lexers.algebra', 'MuPAD', ('mupad',), ('*.mu',), ()),
317
- 'MxmlLexer': ('pip._vendor.pygments.lexers.actionscript', 'MXML', ('mxml',), ('*.mxml',), ()),
318
- 'MySqlLexer': ('pip._vendor.pygments.lexers.sql', 'MySQL', ('mysql',), (), ('text/x-mysql',)),
319
- 'MyghtyCssLexer': ('pip._vendor.pygments.lexers.templates', 'CSS+Myghty', ('css+myghty',), (), ('text/css+myghty',)),
320
- 'MyghtyHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Myghty', ('html+myghty',), (), ('text/html+myghty',)),
321
- 'MyghtyJavascriptLexer': ('pip._vendor.pygments.lexers.templates', 'JavaScript+Myghty', ('javascript+myghty', 'js+myghty'), (), ('application/x-javascript+myghty', 'text/x-javascript+myghty', 'text/javascript+mygthy')),
322
- 'MyghtyLexer': ('pip._vendor.pygments.lexers.templates', 'Myghty', ('myghty',), ('*.myt', 'autodelegate'), ('application/x-myghty',)),
323
- 'MyghtyXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)),
324
- 'NCLLexer': ('pip._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)),
325
- 'NSISLexer': ('pip._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)),
326
- 'NasmLexer': ('pip._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM', '*.nasm'), ('text/x-nasm',)),
327
- 'NasmObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)),
328
- 'NemerleLexer': ('pip._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)),
329
- 'NesCLexer': ('pip._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)),
330
- 'NestedTextLexer': ('pip._vendor.pygments.lexers.configs', 'NestedText', ('nestedtext', 'nt'), ('*.nt',), ()),
331
- 'NewLispLexer': ('pip._vendor.pygments.lexers.lisp', 'NewLisp', ('newlisp',), ('*.lsp', '*.nl', '*.kif'), ('text/x-newlisp', 'application/x-newlisp')),
332
- 'NewspeakLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Newspeak', ('newspeak',), ('*.ns2',), ('text/x-newspeak',)),
333
- 'NginxConfLexer': ('pip._vendor.pygments.lexers.configs', 'Nginx configuration file', ('nginx',), ('nginx.conf',), ('text/x-nginx-conf',)),
334
- 'NimrodLexer': ('pip._vendor.pygments.lexers.nimrod', 'Nimrod', ('nimrod', 'nim'), ('*.nim', '*.nimrod'), ('text/x-nim',)),
335
- 'NitLexer': ('pip._vendor.pygments.lexers.nit', 'Nit', ('nit',), ('*.nit',), ()),
336
- 'NixLexer': ('pip._vendor.pygments.lexers.nix', 'Nix', ('nixos', 'nix'), ('*.nix',), ('text/x-nix',)),
337
- 'NodeConsoleLexer': ('pip._vendor.pygments.lexers.javascript', 'Node.js REPL console session', ('nodejsrepl',), (), ('text/x-nodejsrepl',)),
338
- 'NotmuchLexer': ('pip._vendor.pygments.lexers.textfmts', 'Notmuch', ('notmuch',), (), ()),
339
- 'NuSMVLexer': ('pip._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()),
340
- 'NumPyLexer': ('pip._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()),
341
- 'ObjdumpLexer': ('pip._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)),
342
- 'ObjectiveCLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)),
343
- 'ObjectiveCppLexer': ('pip._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)),
344
- 'ObjectiveJLexer': ('pip._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)),
345
- 'OcamlLexer': ('pip._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)),
346
- 'OctaveLexer': ('pip._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)),
347
- 'OdinLexer': ('pip._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)),
348
- 'OmgIdlLexer': ('pip._vendor.pygments.lexers.c_like', 'OMG Interface Definition Language', ('omg-idl',), ('*.idl', '*.pidl'), ()),
349
- 'OocLexer': ('pip._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)),
350
- 'OpaLexer': ('pip._vendor.pygments.lexers.ml', 'Opa', ('opa',), ('*.opa',), ('text/x-opa',)),
351
- 'OpenEdgeLexer': ('pip._vendor.pygments.lexers.business', 'OpenEdge ABL', ('openedge', 'abl', 'progress'), ('*.p', '*.cls'), ('text/x-openedge', 'application/x-openedge')),
352
- 'OutputLexer': ('pip._vendor.pygments.lexers.special', 'Text output', ('output',), (), ()),
353
- 'PacmanConfLexer': ('pip._vendor.pygments.lexers.configs', 'PacmanConf', ('pacmanconf',), ('pacman.conf',), ()),
354
- 'PanLexer': ('pip._vendor.pygments.lexers.dsls', 'Pan', ('pan',), ('*.pan',), ()),
355
- 'ParaSailLexer': ('pip._vendor.pygments.lexers.parasail', 'ParaSail', ('parasail',), ('*.psi', '*.psl'), ('text/x-parasail',)),
356
- 'PawnLexer': ('pip._vendor.pygments.lexers.pawn', 'Pawn', ('pawn',), ('*.p', '*.pwn', '*.inc'), ('text/x-pawn',)),
357
- 'PegLexer': ('pip._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)),
358
- 'Perl6Lexer': ('pip._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')),
359
- 'PerlLexer': ('pip._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')),
360
- 'PhixLexer': ('pip._vendor.pygments.lexers.phix', 'Phix', ('phix',), ('*.exw',), ('text/x-phix',)),
361
- 'PhpLexer': ('pip._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)),
362
- 'PigLexer': ('pip._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)),
363
- 'PikeLexer': ('pip._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)),
364
- 'PkgConfigLexer': ('pip._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()),
365
- 'PlPgsqlLexer': ('pip._vendor.pygments.lexers.sql', 'PL/pgSQL', ('plpgsql',), (), ('text/x-plpgsql',)),
366
- 'PointlessLexer': ('pip._vendor.pygments.lexers.pointless', 'Pointless', ('pointless',), ('*.ptls',), ()),
367
- 'PonyLexer': ('pip._vendor.pygments.lexers.pony', 'Pony', ('pony',), ('*.pony',), ()),
368
- 'PortugolLexer': ('pip._vendor.pygments.lexers.pascal', 'Portugol', ('portugol',), ('*.alg', '*.portugol'), ()),
369
- 'PostScriptLexer': ('pip._vendor.pygments.lexers.graphics', 'PostScript', ('postscript', 'postscr'), ('*.ps', '*.eps'), ('application/postscript',)),
370
- 'PostgresConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)),
371
- 'PostgresLexer': ('pip._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)),
372
- 'PovrayLexer': ('pip._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)),
373
- 'PowerShellLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'pwsh', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)),
374
- 'PowerShellSessionLexer': ('pip._vendor.pygments.lexers.shell', 'PowerShell Session', ('pwsh-session', 'ps1con'), (), ()),
375
- 'PraatLexer': ('pip._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()),
376
- 'ProcfileLexer': ('pip._vendor.pygments.lexers.procfile', 'Procfile', ('procfile',), ('Procfile',), ()),
377
- 'PrologLexer': ('pip._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)),
378
- 'PromQLLexer': ('pip._vendor.pygments.lexers.promql', 'PromQL', ('promql',), ('*.promql',), ()),
379
- 'PropertiesLexer': ('pip._vendor.pygments.lexers.configs', 'Properties', ('properties', 'jproperties'), ('*.properties',), ('text/x-java-properties',)),
380
- 'ProtoBufLexer': ('pip._vendor.pygments.lexers.dsls', 'Protocol Buffer', ('protobuf', 'proto'), ('*.proto',), ()),
381
- 'PsyshConsoleLexer': ('pip._vendor.pygments.lexers.php', 'PsySH console session for PHP', ('psysh',), (), ()),
382
- 'PugLexer': ('pip._vendor.pygments.lexers.html', 'Pug', ('pug', 'jade'), ('*.pug', '*.jade'), ('text/x-pug', 'text/x-jade')),
383
- 'PuppetLexer': ('pip._vendor.pygments.lexers.dsls', 'Puppet', ('puppet',), ('*.pp',), ()),
384
- 'PyPyLogLexer': ('pip._vendor.pygments.lexers.console', 'PyPy Log', ('pypylog', 'pypy'), ('*.pypylog',), ('application/x-pypylog',)),
385
- 'Python2Lexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')),
386
- 'Python2TracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)),
387
- 'PythonConsoleLexer': ('pip._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)),
388
- 'PythonLexer': ('pip._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.pyi', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 'application/x-python', 'text/x-python3', 'application/x-python3')),
389
- 'PythonTracebackLexer': ('pip._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')),
390
- 'PythonUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'Python+UL4', ('py+ul4',), ('*.pyul4',), ()),
391
- 'QBasicLexer': ('pip._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)),
392
- 'QLexer': ('pip._vendor.pygments.lexers.q', 'Q', ('q',), ('*.q',), ()),
393
- 'QVToLexer': ('pip._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()),
394
- 'QlikLexer': ('pip._vendor.pygments.lexers.qlik', 'Qlik', ('qlik', 'qlikview', 'qliksense', 'qlikscript'), ('*.qvs', '*.qvw'), ()),
395
- 'QmlLexer': ('pip._vendor.pygments.lexers.webmisc', 'QML', ('qml', 'qbs'), ('*.qml', '*.qbs'), ('application/x-qml', 'application/x-qt.qbs+qml')),
396
- 'RConsoleLexer': ('pip._vendor.pygments.lexers.r', 'RConsole', ('rconsole', 'rout'), ('*.Rout',), ()),
397
- 'RNCCompactLexer': ('pip._vendor.pygments.lexers.rnc', 'Relax-NG Compact', ('rng-compact', 'rnc'), ('*.rnc',), ()),
398
- 'RPMSpecLexer': ('pip._vendor.pygments.lexers.installers', 'RPMSpec', ('spec',), ('*.spec',), ('text/x-rpm-spec',)),
399
- 'RacketLexer': ('pip._vendor.pygments.lexers.lisp', 'Racket', ('racket', 'rkt'), ('*.rkt', '*.rktd', '*.rktl'), ('text/x-racket', 'application/x-racket')),
400
- 'RagelCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in C Host', ('ragel-c',), ('*.rl',), ()),
401
- 'RagelCppLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in CPP Host', ('ragel-cpp',), ('*.rl',), ()),
402
- 'RagelDLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in D Host', ('ragel-d',), ('*.rl',), ()),
403
- 'RagelEmbeddedLexer': ('pip._vendor.pygments.lexers.parsers', 'Embedded Ragel', ('ragel-em',), ('*.rl',), ()),
404
- 'RagelJavaLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Java Host', ('ragel-java',), ('*.rl',), ()),
405
- 'RagelLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel', ('ragel',), (), ()),
406
- 'RagelObjectiveCLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Objective C Host', ('ragel-objc',), ('*.rl',), ()),
407
- 'RagelRubyLexer': ('pip._vendor.pygments.lexers.parsers', 'Ragel in Ruby Host', ('ragel-ruby', 'ragel-rb'), ('*.rl',), ()),
408
- 'RawTokenLexer': ('pip._vendor.pygments.lexers.special', 'Raw token data', (), (), ('application/x-pygments-tokens',)),
409
- 'RdLexer': ('pip._vendor.pygments.lexers.r', 'Rd', ('rd',), ('*.Rd',), ('text/x-r-doc',)),
410
- 'ReasonLexer': ('pip._vendor.pygments.lexers.ml', 'ReasonML', ('reasonml', 'reason'), ('*.re', '*.rei'), ('text/x-reasonml',)),
411
- 'RebolLexer': ('pip._vendor.pygments.lexers.rebol', 'REBOL', ('rebol',), ('*.r', '*.r3', '*.reb'), ('text/x-rebol',)),
412
- 'RedLexer': ('pip._vendor.pygments.lexers.rebol', 'Red', ('red', 'red/system'), ('*.red', '*.reds'), ('text/x-red', 'text/x-red-system')),
413
- 'RedcodeLexer': ('pip._vendor.pygments.lexers.esoteric', 'Redcode', ('redcode',), ('*.cw',), ()),
414
- 'RegeditLexer': ('pip._vendor.pygments.lexers.configs', 'reg', ('registry',), ('*.reg',), ('text/x-windows-registry',)),
415
- 'ResourceLexer': ('pip._vendor.pygments.lexers.resource', 'ResourceBundle', ('resourcebundle', 'resource'), (), ()),
416
- 'RexxLexer': ('pip._vendor.pygments.lexers.scripting', 'Rexx', ('rexx', 'arexx'), ('*.rexx', '*.rex', '*.rx', '*.arexx'), ('text/x-rexx',)),
417
- 'RhtmlLexer': ('pip._vendor.pygments.lexers.templates', 'RHTML', ('rhtml', 'html+erb', 'html+ruby'), ('*.rhtml',), ('text/html+ruby',)),
418
- 'RideLexer': ('pip._vendor.pygments.lexers.ride', 'Ride', ('ride',), ('*.ride',), ('text/x-ride',)),
419
- 'RitaLexer': ('pip._vendor.pygments.lexers.rita', 'Rita', ('rita',), ('*.rita',), ('text/rita',)),
420
- 'RoboconfGraphLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Graph', ('roboconf-graph',), ('*.graph',), ()),
421
- 'RoboconfInstancesLexer': ('pip._vendor.pygments.lexers.roboconf', 'Roboconf Instances', ('roboconf-instances',), ('*.instances',), ()),
422
- 'RobotFrameworkLexer': ('pip._vendor.pygments.lexers.robotframework', 'RobotFramework', ('robotframework',), ('*.robot', '*.resource'), ('text/x-robotframework',)),
423
- 'RqlLexer': ('pip._vendor.pygments.lexers.sql', 'RQL', ('rql',), ('*.rql',), ('text/x-rql',)),
424
- 'RslLexer': ('pip._vendor.pygments.lexers.dsls', 'RSL', ('rsl',), ('*.rsl',), ('text/rsl',)),
425
- 'RstLexer': ('pip._vendor.pygments.lexers.markup', 'reStructuredText', ('restructuredtext', 'rst', 'rest'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')),
426
- 'RtsLexer': ('pip._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('trafficscript', 'rts'), ('*.rts',), ()),
427
- 'RubyConsoleLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)),
428
- 'RubyLexer': ('pip._vendor.pygments.lexers.ruby', 'Ruby', ('ruby', 'rb', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile', 'Vagrantfile'), ('text/x-ruby', 'application/x-ruby')),
429
- 'RustLexer': ('pip._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')),
430
- 'SASLexer': ('pip._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')),
431
- 'SLexer': ('pip._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')),
432
- 'SMLLexer': ('pip._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')),
433
- 'SNBTLexer': ('pip._vendor.pygments.lexers.minecraft', 'SNBT', ('snbt',), ('*.snbt',), ('text/snbt',)),
434
- 'SarlLexer': ('pip._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)),
435
- 'SassLexer': ('pip._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)),
436
- 'SaviLexer': ('pip._vendor.pygments.lexers.savi', 'Savi', ('savi',), ('*.savi',), ()),
437
- 'ScalaLexer': ('pip._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)),
438
- 'ScamlLexer': ('pip._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)),
439
- 'ScdocLexer': ('pip._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()),
440
- 'SchemeLexer': ('pip._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 'application/x-scheme')),
441
- 'ScilabLexer': ('pip._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)),
442
- 'ScssLexer': ('pip._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)),
443
- 'SedLexer': ('pip._vendor.pygments.lexers.textedit', 'Sed', ('sed', 'gsed', 'ssed'), ('*.sed', '*.[gs]sed'), ('text/x-sed',)),
444
- 'ShExCLexer': ('pip._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)),
445
- 'ShenLexer': ('pip._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')),
446
- 'SieveLexer': ('pip._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()),
447
- 'SilverLexer': ('pip._vendor.pygments.lexers.verification', 'Silver', ('silver',), ('*.sil', '*.vpr'), ()),
448
- 'SingularityLexer': ('pip._vendor.pygments.lexers.configs', 'Singularity', ('singularity',), ('*.def', 'Singularity'), ()),
449
- 'SlashLexer': ('pip._vendor.pygments.lexers.slash', 'Slash', ('slash',), ('*.sla',), ()),
450
- 'SlimLexer': ('pip._vendor.pygments.lexers.webmisc', 'Slim', ('slim',), ('*.slim',), ('text/x-slim',)),
451
- 'SlurmBashLexer': ('pip._vendor.pygments.lexers.shell', 'Slurm', ('slurm', 'sbatch'), ('*.sl',), ()),
452
- 'SmaliLexer': ('pip._vendor.pygments.lexers.dalvik', 'Smali', ('smali',), ('*.smali',), ('text/smali',)),
453
- 'SmalltalkLexer': ('pip._vendor.pygments.lexers.smalltalk', 'Smalltalk', ('smalltalk', 'squeak', 'st'), ('*.st',), ('text/x-smalltalk',)),
454
- 'SmartGameFormatLexer': ('pip._vendor.pygments.lexers.sgf', 'SmartGameFormat', ('sgf',), ('*.sgf',), ()),
455
- 'SmartyLexer': ('pip._vendor.pygments.lexers.templates', 'Smarty', ('smarty',), ('*.tpl',), ('application/x-smarty',)),
456
- 'SmithyLexer': ('pip._vendor.pygments.lexers.smithy', 'Smithy', ('smithy',), ('*.smithy',), ()),
457
- 'SnobolLexer': ('pip._vendor.pygments.lexers.snobol', 'Snobol', ('snobol',), ('*.snobol',), ('text/x-snobol',)),
458
- 'SnowballLexer': ('pip._vendor.pygments.lexers.dsls', 'Snowball', ('snowball',), ('*.sbl',), ()),
459
- 'SolidityLexer': ('pip._vendor.pygments.lexers.solidity', 'Solidity', ('solidity',), ('*.sol',), ()),
460
- 'SophiaLexer': ('pip._vendor.pygments.lexers.sophia', 'Sophia', ('sophia',), ('*.aes',), ()),
461
- 'SourcePawnLexer': ('pip._vendor.pygments.lexers.pawn', 'SourcePawn', ('sp',), ('*.sp',), ('text/x-sourcepawn',)),
462
- 'SourcesListLexer': ('pip._vendor.pygments.lexers.installers', 'Debian Sourcelist', ('debsources', 'sourceslist', 'sources.list'), ('sources.list',), ()),
463
- 'SparqlLexer': ('pip._vendor.pygments.lexers.rdf', 'SPARQL', ('sparql',), ('*.rq', '*.sparql'), ('application/sparql-query',)),
464
- 'SpiceLexer': ('pip._vendor.pygments.lexers.spice', 'Spice', ('spice', 'spicelang'), ('*.spice',), ('text/x-spice',)),
465
- 'SqlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'SQL+Jinja', ('sql+jinja',), ('*.sql', '*.sql.j2', '*.sql.jinja2'), ()),
466
- 'SqlLexer': ('pip._vendor.pygments.lexers.sql', 'SQL', ('sql',), ('*.sql',), ('text/x-sql',)),
467
- 'SqliteConsoleLexer': ('pip._vendor.pygments.lexers.sql', 'sqlite3con', ('sqlite3',), ('*.sqlite3-console',), ('text/x-sqlite3-console',)),
468
- 'SquidConfLexer': ('pip._vendor.pygments.lexers.configs', 'SquidConf', ('squidconf', 'squid.conf', 'squid'), ('squid.conf',), ('text/x-squidconf',)),
469
- 'SrcinfoLexer': ('pip._vendor.pygments.lexers.srcinfo', 'Srcinfo', ('srcinfo',), ('.SRCINFO',), ()),
470
- 'SspLexer': ('pip._vendor.pygments.lexers.templates', 'Scalate Server Page', ('ssp',), ('*.ssp',), ('application/x-ssp',)),
471
- 'StanLexer': ('pip._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()),
472
- 'StataLexer': ('pip._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')),
473
- 'SuperColliderLexer': ('pip._vendor.pygments.lexers.supercollider', 'SuperCollider', ('supercollider', 'sc'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')),
474
- 'SwiftLexer': ('pip._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)),
475
- 'SwigLexer': ('pip._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)),
476
- 'SystemVerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)),
477
- 'TAPLexer': ('pip._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()),
478
- 'TNTLexer': ('pip._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()),
479
- 'TOMLLexer': ('pip._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()),
480
- 'Tads3Lexer': ('pip._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()),
481
- 'TalLexer': ('pip._vendor.pygments.lexers.tal', 'Tal', ('tal', 'uxntal'), ('*.tal',), ('text/x-uxntal',)),
482
- 'TasmLexer': ('pip._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)),
483
- 'TclLexer': ('pip._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')),
484
- 'TcshLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)),
485
- 'TcshSessionLexer': ('pip._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()),
486
- 'TeaTemplateLexer': ('pip._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)),
487
- 'TealLexer': ('pip._vendor.pygments.lexers.teal', 'teal', ('teal',), ('*.teal',), ()),
488
- 'TeraTermLexer': ('pip._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('teratermmacro', 'teraterm', 'ttl'), ('*.ttl',), ('text/x-teratermmacro',)),
489
- 'TermcapLexer': ('pip._vendor.pygments.lexers.configs', 'Termcap', ('termcap',), ('termcap', 'termcap.src'), ()),
490
- 'TerminfoLexer': ('pip._vendor.pygments.lexers.configs', 'Terminfo', ('terminfo',), ('terminfo', 'terminfo.src'), ()),
491
- 'TerraformLexer': ('pip._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 'application/x-terraform')),
492
- 'TexLexer': ('pip._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')),
493
- 'TextLexer': ('pip._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)),
494
- 'ThingsDBLexer': ('pip._vendor.pygments.lexers.thingsdb', 'ThingsDB', ('ti', 'thingsdb'), ('*.ti',), ()),
495
- 'ThriftLexer': ('pip._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)),
496
- 'TiddlyWiki5Lexer': ('pip._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)),
497
- 'TlbLexer': ('pip._vendor.pygments.lexers.tlb', 'Tl-b', ('tlb',), ('*.tlb',), ()),
498
- 'TodotxtLexer': ('pip._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)),
499
- 'TransactSqlLexer': ('pip._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)),
500
- 'TreetopLexer': ('pip._vendor.pygments.lexers.parsers', 'Treetop', ('treetop',), ('*.treetop', '*.tt'), ()),
501
- 'TurtleLexer': ('pip._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')),
502
- 'TwigHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)),
503
- 'TwigLexer': ('pip._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)),
504
- 'TypeScriptLexer': ('pip._vendor.pygments.lexers.javascript', 'TypeScript', ('typescript', 'ts'), ('*.ts',), ('application/x-typescript', 'text/x-typescript')),
505
- 'TypoScriptCssDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()),
506
- 'TypoScriptHtmlDataLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()),
507
- 'TypoScriptLexer': ('pip._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)),
508
- 'UL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'UL4', ('ul4',), ('*.ul4',), ()),
509
- 'UcodeLexer': ('pip._vendor.pygments.lexers.unicon', 'ucode', ('ucode',), ('*.u', '*.u1', '*.u2'), ()),
510
- 'UniconLexer': ('pip._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)),
511
- 'UnixConfigLexer': ('pip._vendor.pygments.lexers.configs', 'Unix/Linux config files', ('unixconfig', 'linuxconfig'), (), ()),
512
- 'UrbiscriptLexer': ('pip._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)),
513
- 'UsdLexer': ('pip._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()),
514
- 'VBScriptLexer': ('pip._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()),
515
- 'VCLLexer': ('pip._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)),
516
- 'VCLSnippetLexer': ('pip._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)),
517
- 'VCTreeStatusLexer': ('pip._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()),
518
- 'VGLLexer': ('pip._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()),
519
- 'ValaLexer': ('pip._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)),
520
- 'VbNetAspxLexer': ('pip._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()),
521
- 'VbNetLexer': ('pip._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet', 'lobas', 'oobas', 'sobas'), ('*.vb', '*.bas'), ('text/x-vbnet', 'text/x-vba')),
522
- 'VelocityHtmlLexer': ('pip._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)),
523
- 'VelocityLexer': ('pip._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()),
524
- 'VelocityXmlLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)),
525
- 'VerilogLexer': ('pip._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)),
526
- 'VhdlLexer': ('pip._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)),
527
- 'VimLexer': ('pip._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)),
528
- 'WDiffLexer': ('pip._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()),
529
- 'WatLexer': ('pip._vendor.pygments.lexers.webassembly', 'WebAssembly', ('wast', 'wat'), ('*.wat', '*.wast'), ()),
530
- 'WebIDLLexer': ('pip._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()),
531
- 'WhileyLexer': ('pip._vendor.pygments.lexers.whiley', 'Whiley', ('whiley',), ('*.whiley',), ('text/x-whiley',)),
532
- 'WoWTocLexer': ('pip._vendor.pygments.lexers.wowtoc', 'World of Warcraft TOC', ('wowtoc',), ('*.toc',), ()),
533
- 'WrenLexer': ('pip._vendor.pygments.lexers.wren', 'Wren', ('wren',), ('*.wren',), ()),
534
- 'X10Lexer': ('pip._vendor.pygments.lexers.x10', 'X10', ('x10', 'xten'), ('*.x10',), ('text/x-x10',)),
535
- 'XMLUL4Lexer': ('pip._vendor.pygments.lexers.ul4', 'XML+UL4', ('xml+ul4',), ('*.xmlul4',), ()),
536
- 'XQueryLexer': ('pip._vendor.pygments.lexers.webmisc', 'XQuery', ('xquery', 'xqy', 'xq', 'xql', 'xqm'), ('*.xqy', '*.xquery', '*.xq', '*.xql', '*.xqm'), ('text/xquery', 'application/xquery')),
537
- 'XmlDjangoLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Django/Jinja', ('xml+django', 'xml+jinja'), ('*.xml.j2', '*.xml.jinja2'), ('application/xml+django', 'application/xml+jinja')),
538
- 'XmlErbLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Ruby', ('xml+ruby', 'xml+erb'), (), ('application/xml+ruby',)),
539
- 'XmlLexer': ('pip._vendor.pygments.lexers.html', 'XML', ('xml',), ('*.xml', '*.xsl', '*.rss', '*.xslt', '*.xsd', '*.wsdl', '*.wsf'), ('text/xml', 'application/xml', 'image/svg+xml', 'application/rss+xml', 'application/atom+xml')),
540
- 'XmlPhpLexer': ('pip._vendor.pygments.lexers.templates', 'XML+PHP', ('xml+php',), (), ('application/xml+php',)),
541
- 'XmlSmartyLexer': ('pip._vendor.pygments.lexers.templates', 'XML+Smarty', ('xml+smarty',), (), ('application/xml+smarty',)),
542
- 'XorgLexer': ('pip._vendor.pygments.lexers.xorg', 'Xorg', ('xorg.conf',), ('xorg.conf',), ()),
543
- 'XsltLexer': ('pip._vendor.pygments.lexers.html', 'XSLT', ('xslt',), ('*.xsl', '*.xslt', '*.xpl'), ('application/xsl+xml', 'application/xslt+xml')),
544
- 'XtendLexer': ('pip._vendor.pygments.lexers.jvm', 'Xtend', ('xtend',), ('*.xtend',), ('text/x-xtend',)),
545
- 'XtlangLexer': ('pip._vendor.pygments.lexers.lisp', 'xtlang', ('extempore',), ('*.xtm',), ()),
546
- 'YamlJinjaLexer': ('pip._vendor.pygments.lexers.templates', 'YAML+Jinja', ('yaml+jinja', 'salt', 'sls'), ('*.sls', '*.yaml.j2', '*.yml.j2', '*.yaml.jinja2', '*.yml.jinja2'), ('text/x-yaml+jinja', 'text/x-sls')),
547
- 'YamlLexer': ('pip._vendor.pygments.lexers.data', 'YAML', ('yaml',), ('*.yaml', '*.yml'), ('text/x-yaml',)),
548
- 'YangLexer': ('pip._vendor.pygments.lexers.yang', 'YANG', ('yang',), ('*.yang',), ('application/yang',)),
549
- 'ZeekLexer': ('pip._vendor.pygments.lexers.dsls', 'Zeek', ('zeek', 'bro'), ('*.zeek', '*.bro'), ()),
550
- 'ZephirLexer': ('pip._vendor.pygments.lexers.php', 'Zephir', ('zephir',), ('*.zep',), ()),
551
- 'ZigLexer': ('pip._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)),
552
- 'apdlexer': ('pip._vendor.pygments.lexers.apdlexer', 'ANSYS parametric design language', ('ansys', 'apdl'), ('*.ans',), ()),
553
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/requests/exceptions.py DELETED
@@ -1,141 +0,0 @@
1
- """
2
- requests.exceptions
3
- ~~~~~~~~~~~~~~~~~~~
4
-
5
- This module contains the set of Requests' exceptions.
6
- """
7
- from pip._vendor.urllib3.exceptions import HTTPError as BaseHTTPError
8
-
9
- from .compat import JSONDecodeError as CompatJSONDecodeError
10
-
11
-
12
- class RequestException(IOError):
13
- """There was an ambiguous exception that occurred while handling your
14
- request.
15
- """
16
-
17
- def __init__(self, *args, **kwargs):
18
- """Initialize RequestException with `request` and `response` objects."""
19
- response = kwargs.pop("response", None)
20
- self.response = response
21
- self.request = kwargs.pop("request", None)
22
- if response is not None and not self.request and hasattr(response, "request"):
23
- self.request = self.response.request
24
- super().__init__(*args, **kwargs)
25
-
26
-
27
- class InvalidJSONError(RequestException):
28
- """A JSON error occurred."""
29
-
30
-
31
- class JSONDecodeError(InvalidJSONError, CompatJSONDecodeError):
32
- """Couldn't decode the text into json"""
33
-
34
- def __init__(self, *args, **kwargs):
35
- """
36
- Construct the JSONDecodeError instance first with all
37
- args. Then use it's args to construct the IOError so that
38
- the json specific args aren't used as IOError specific args
39
- and the error message from JSONDecodeError is preserved.
40
- """
41
- CompatJSONDecodeError.__init__(self, *args)
42
- InvalidJSONError.__init__(self, *self.args, **kwargs)
43
-
44
-
45
- class HTTPError(RequestException):
46
- """An HTTP error occurred."""
47
-
48
-
49
- class ConnectionError(RequestException):
50
- """A Connection error occurred."""
51
-
52
-
53
- class ProxyError(ConnectionError):
54
- """A proxy error occurred."""
55
-
56
-
57
- class SSLError(ConnectionError):
58
- """An SSL error occurred."""
59
-
60
-
61
- class Timeout(RequestException):
62
- """The request timed out.
63
-
64
- Catching this error will catch both
65
- :exc:`~requests.exceptions.ConnectTimeout` and
66
- :exc:`~requests.exceptions.ReadTimeout` errors.
67
- """
68
-
69
-
70
- class ConnectTimeout(ConnectionError, Timeout):
71
- """The request timed out while trying to connect to the remote server.
72
-
73
- Requests that produced this error are safe to retry.
74
- """
75
-
76
-
77
- class ReadTimeout(Timeout):
78
- """The server did not send any data in the allotted amount of time."""
79
-
80
-
81
- class URLRequired(RequestException):
82
- """A valid URL is required to make a request."""
83
-
84
-
85
- class TooManyRedirects(RequestException):
86
- """Too many redirects."""
87
-
88
-
89
- class MissingSchema(RequestException, ValueError):
90
- """The URL scheme (e.g. http or https) is missing."""
91
-
92
-
93
- class InvalidSchema(RequestException, ValueError):
94
- """The URL scheme provided is either invalid or unsupported."""
95
-
96
-
97
- class InvalidURL(RequestException, ValueError):
98
- """The URL provided was somehow invalid."""
99
-
100
-
101
- class InvalidHeader(RequestException, ValueError):
102
- """The header value provided was somehow invalid."""
103
-
104
-
105
- class InvalidProxyURL(InvalidURL):
106
- """The proxy URL provided is invalid."""
107
-
108
-
109
- class ChunkedEncodingError(RequestException):
110
- """The server declared chunked encoding but sent an invalid chunk."""
111
-
112
-
113
- class ContentDecodingError(RequestException, BaseHTTPError):
114
- """Failed to decode response content."""
115
-
116
-
117
- class StreamConsumedError(RequestException, TypeError):
118
- """The content for this response was already consumed."""
119
-
120
-
121
- class RetryError(RequestException):
122
- """Custom retries logic failed"""
123
-
124
-
125
- class UnrewindableBodyError(RequestException):
126
- """Requests encountered an error when trying to rewind a body."""
127
-
128
-
129
- # Warnings
130
-
131
-
132
- class RequestsWarning(Warning):
133
- """Base warning for Requests."""
134
-
135
-
136
- class FileModeWarning(RequestsWarning, DeprecationWarning):
137
- """A file was opened in text mode, but Requests determined its binary length."""
138
-
139
-
140
- class RequestsDependencyWarning(RequestsWarning):
141
- """An imported dependency doesn't match the expected version range."""
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/resolvelib/__init__.py DELETED
@@ -1,26 +0,0 @@
1
- __all__ = [
2
- "__version__",
3
- "AbstractProvider",
4
- "AbstractResolver",
5
- "BaseReporter",
6
- "InconsistentCandidate",
7
- "Resolver",
8
- "RequirementsConflicted",
9
- "ResolutionError",
10
- "ResolutionImpossible",
11
- "ResolutionTooDeep",
12
- ]
13
-
14
- __version__ = "1.0.1"
15
-
16
-
17
- from .providers import AbstractProvider, AbstractResolver
18
- from .reporters import BaseReporter
19
- from .resolvers import (
20
- InconsistentCandidate,
21
- RequirementsConflicted,
22
- ResolutionError,
23
- ResolutionImpossible,
24
- ResolutionTooDeep,
25
- Resolver,
26
- )
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/_cell_widths.py DELETED
@@ -1,451 +0,0 @@
1
- # Auto generated by make_terminal_widths.py
2
-
3
- CELL_WIDTHS = [
4
- (0, 0, 0),
5
- (1, 31, -1),
6
- (127, 159, -1),
7
- (768, 879, 0),
8
- (1155, 1161, 0),
9
- (1425, 1469, 0),
10
- (1471, 1471, 0),
11
- (1473, 1474, 0),
12
- (1476, 1477, 0),
13
- (1479, 1479, 0),
14
- (1552, 1562, 0),
15
- (1611, 1631, 0),
16
- (1648, 1648, 0),
17
- (1750, 1756, 0),
18
- (1759, 1764, 0),
19
- (1767, 1768, 0),
20
- (1770, 1773, 0),
21
- (1809, 1809, 0),
22
- (1840, 1866, 0),
23
- (1958, 1968, 0),
24
- (2027, 2035, 0),
25
- (2045, 2045, 0),
26
- (2070, 2073, 0),
27
- (2075, 2083, 0),
28
- (2085, 2087, 0),
29
- (2089, 2093, 0),
30
- (2137, 2139, 0),
31
- (2259, 2273, 0),
32
- (2275, 2306, 0),
33
- (2362, 2362, 0),
34
- (2364, 2364, 0),
35
- (2369, 2376, 0),
36
- (2381, 2381, 0),
37
- (2385, 2391, 0),
38
- (2402, 2403, 0),
39
- (2433, 2433, 0),
40
- (2492, 2492, 0),
41
- (2497, 2500, 0),
42
- (2509, 2509, 0),
43
- (2530, 2531, 0),
44
- (2558, 2558, 0),
45
- (2561, 2562, 0),
46
- (2620, 2620, 0),
47
- (2625, 2626, 0),
48
- (2631, 2632, 0),
49
- (2635, 2637, 0),
50
- (2641, 2641, 0),
51
- (2672, 2673, 0),
52
- (2677, 2677, 0),
53
- (2689, 2690, 0),
54
- (2748, 2748, 0),
55
- (2753, 2757, 0),
56
- (2759, 2760, 0),
57
- (2765, 2765, 0),
58
- (2786, 2787, 0),
59
- (2810, 2815, 0),
60
- (2817, 2817, 0),
61
- (2876, 2876, 0),
62
- (2879, 2879, 0),
63
- (2881, 2884, 0),
64
- (2893, 2893, 0),
65
- (2901, 2902, 0),
66
- (2914, 2915, 0),
67
- (2946, 2946, 0),
68
- (3008, 3008, 0),
69
- (3021, 3021, 0),
70
- (3072, 3072, 0),
71
- (3076, 3076, 0),
72
- (3134, 3136, 0),
73
- (3142, 3144, 0),
74
- (3146, 3149, 0),
75
- (3157, 3158, 0),
76
- (3170, 3171, 0),
77
- (3201, 3201, 0),
78
- (3260, 3260, 0),
79
- (3263, 3263, 0),
80
- (3270, 3270, 0),
81
- (3276, 3277, 0),
82
- (3298, 3299, 0),
83
- (3328, 3329, 0),
84
- (3387, 3388, 0),
85
- (3393, 3396, 0),
86
- (3405, 3405, 0),
87
- (3426, 3427, 0),
88
- (3457, 3457, 0),
89
- (3530, 3530, 0),
90
- (3538, 3540, 0),
91
- (3542, 3542, 0),
92
- (3633, 3633, 0),
93
- (3636, 3642, 0),
94
- (3655, 3662, 0),
95
- (3761, 3761, 0),
96
- (3764, 3772, 0),
97
- (3784, 3789, 0),
98
- (3864, 3865, 0),
99
- (3893, 3893, 0),
100
- (3895, 3895, 0),
101
- (3897, 3897, 0),
102
- (3953, 3966, 0),
103
- (3968, 3972, 0),
104
- (3974, 3975, 0),
105
- (3981, 3991, 0),
106
- (3993, 4028, 0),
107
- (4038, 4038, 0),
108
- (4141, 4144, 0),
109
- (4146, 4151, 0),
110
- (4153, 4154, 0),
111
- (4157, 4158, 0),
112
- (4184, 4185, 0),
113
- (4190, 4192, 0),
114
- (4209, 4212, 0),
115
- (4226, 4226, 0),
116
- (4229, 4230, 0),
117
- (4237, 4237, 0),
118
- (4253, 4253, 0),
119
- (4352, 4447, 2),
120
- (4957, 4959, 0),
121
- (5906, 5908, 0),
122
- (5938, 5940, 0),
123
- (5970, 5971, 0),
124
- (6002, 6003, 0),
125
- (6068, 6069, 0),
126
- (6071, 6077, 0),
127
- (6086, 6086, 0),
128
- (6089, 6099, 0),
129
- (6109, 6109, 0),
130
- (6155, 6157, 0),
131
- (6277, 6278, 0),
132
- (6313, 6313, 0),
133
- (6432, 6434, 0),
134
- (6439, 6440, 0),
135
- (6450, 6450, 0),
136
- (6457, 6459, 0),
137
- (6679, 6680, 0),
138
- (6683, 6683, 0),
139
- (6742, 6742, 0),
140
- (6744, 6750, 0),
141
- (6752, 6752, 0),
142
- (6754, 6754, 0),
143
- (6757, 6764, 0),
144
- (6771, 6780, 0),
145
- (6783, 6783, 0),
146
- (6832, 6848, 0),
147
- (6912, 6915, 0),
148
- (6964, 6964, 0),
149
- (6966, 6970, 0),
150
- (6972, 6972, 0),
151
- (6978, 6978, 0),
152
- (7019, 7027, 0),
153
- (7040, 7041, 0),
154
- (7074, 7077, 0),
155
- (7080, 7081, 0),
156
- (7083, 7085, 0),
157
- (7142, 7142, 0),
158
- (7144, 7145, 0),
159
- (7149, 7149, 0),
160
- (7151, 7153, 0),
161
- (7212, 7219, 0),
162
- (7222, 7223, 0),
163
- (7376, 7378, 0),
164
- (7380, 7392, 0),
165
- (7394, 7400, 0),
166
- (7405, 7405, 0),
167
- (7412, 7412, 0),
168
- (7416, 7417, 0),
169
- (7616, 7673, 0),
170
- (7675, 7679, 0),
171
- (8203, 8207, 0),
172
- (8232, 8238, 0),
173
- (8288, 8291, 0),
174
- (8400, 8432, 0),
175
- (8986, 8987, 2),
176
- (9001, 9002, 2),
177
- (9193, 9196, 2),
178
- (9200, 9200, 2),
179
- (9203, 9203, 2),
180
- (9725, 9726, 2),
181
- (9748, 9749, 2),
182
- (9800, 9811, 2),
183
- (9855, 9855, 2),
184
- (9875, 9875, 2),
185
- (9889, 9889, 2),
186
- (9898, 9899, 2),
187
- (9917, 9918, 2),
188
- (9924, 9925, 2),
189
- (9934, 9934, 2),
190
- (9940, 9940, 2),
191
- (9962, 9962, 2),
192
- (9970, 9971, 2),
193
- (9973, 9973, 2),
194
- (9978, 9978, 2),
195
- (9981, 9981, 2),
196
- (9989, 9989, 2),
197
- (9994, 9995, 2),
198
- (10024, 10024, 2),
199
- (10060, 10060, 2),
200
- (10062, 10062, 2),
201
- (10067, 10069, 2),
202
- (10071, 10071, 2),
203
- (10133, 10135, 2),
204
- (10160, 10160, 2),
205
- (10175, 10175, 2),
206
- (11035, 11036, 2),
207
- (11088, 11088, 2),
208
- (11093, 11093, 2),
209
- (11503, 11505, 0),
210
- (11647, 11647, 0),
211
- (11744, 11775, 0),
212
- (11904, 11929, 2),
213
- (11931, 12019, 2),
214
- (12032, 12245, 2),
215
- (12272, 12283, 2),
216
- (12288, 12329, 2),
217
- (12330, 12333, 0),
218
- (12334, 12350, 2),
219
- (12353, 12438, 2),
220
- (12441, 12442, 0),
221
- (12443, 12543, 2),
222
- (12549, 12591, 2),
223
- (12593, 12686, 2),
224
- (12688, 12771, 2),
225
- (12784, 12830, 2),
226
- (12832, 12871, 2),
227
- (12880, 19903, 2),
228
- (19968, 42124, 2),
229
- (42128, 42182, 2),
230
- (42607, 42610, 0),
231
- (42612, 42621, 0),
232
- (42654, 42655, 0),
233
- (42736, 42737, 0),
234
- (43010, 43010, 0),
235
- (43014, 43014, 0),
236
- (43019, 43019, 0),
237
- (43045, 43046, 0),
238
- (43052, 43052, 0),
239
- (43204, 43205, 0),
240
- (43232, 43249, 0),
241
- (43263, 43263, 0),
242
- (43302, 43309, 0),
243
- (43335, 43345, 0),
244
- (43360, 43388, 2),
245
- (43392, 43394, 0),
246
- (43443, 43443, 0),
247
- (43446, 43449, 0),
248
- (43452, 43453, 0),
249
- (43493, 43493, 0),
250
- (43561, 43566, 0),
251
- (43569, 43570, 0),
252
- (43573, 43574, 0),
253
- (43587, 43587, 0),
254
- (43596, 43596, 0),
255
- (43644, 43644, 0),
256
- (43696, 43696, 0),
257
- (43698, 43700, 0),
258
- (43703, 43704, 0),
259
- (43710, 43711, 0),
260
- (43713, 43713, 0),
261
- (43756, 43757, 0),
262
- (43766, 43766, 0),
263
- (44005, 44005, 0),
264
- (44008, 44008, 0),
265
- (44013, 44013, 0),
266
- (44032, 55203, 2),
267
- (63744, 64255, 2),
268
- (64286, 64286, 0),
269
- (65024, 65039, 0),
270
- (65040, 65049, 2),
271
- (65056, 65071, 0),
272
- (65072, 65106, 2),
273
- (65108, 65126, 2),
274
- (65128, 65131, 2),
275
- (65281, 65376, 2),
276
- (65504, 65510, 2),
277
- (66045, 66045, 0),
278
- (66272, 66272, 0),
279
- (66422, 66426, 0),
280
- (68097, 68099, 0),
281
- (68101, 68102, 0),
282
- (68108, 68111, 0),
283
- (68152, 68154, 0),
284
- (68159, 68159, 0),
285
- (68325, 68326, 0),
286
- (68900, 68903, 0),
287
- (69291, 69292, 0),
288
- (69446, 69456, 0),
289
- (69633, 69633, 0),
290
- (69688, 69702, 0),
291
- (69759, 69761, 0),
292
- (69811, 69814, 0),
293
- (69817, 69818, 0),
294
- (69888, 69890, 0),
295
- (69927, 69931, 0),
296
- (69933, 69940, 0),
297
- (70003, 70003, 0),
298
- (70016, 70017, 0),
299
- (70070, 70078, 0),
300
- (70089, 70092, 0),
301
- (70095, 70095, 0),
302
- (70191, 70193, 0),
303
- (70196, 70196, 0),
304
- (70198, 70199, 0),
305
- (70206, 70206, 0),
306
- (70367, 70367, 0),
307
- (70371, 70378, 0),
308
- (70400, 70401, 0),
309
- (70459, 70460, 0),
310
- (70464, 70464, 0),
311
- (70502, 70508, 0),
312
- (70512, 70516, 0),
313
- (70712, 70719, 0),
314
- (70722, 70724, 0),
315
- (70726, 70726, 0),
316
- (70750, 70750, 0),
317
- (70835, 70840, 0),
318
- (70842, 70842, 0),
319
- (70847, 70848, 0),
320
- (70850, 70851, 0),
321
- (71090, 71093, 0),
322
- (71100, 71101, 0),
323
- (71103, 71104, 0),
324
- (71132, 71133, 0),
325
- (71219, 71226, 0),
326
- (71229, 71229, 0),
327
- (71231, 71232, 0),
328
- (71339, 71339, 0),
329
- (71341, 71341, 0),
330
- (71344, 71349, 0),
331
- (71351, 71351, 0),
332
- (71453, 71455, 0),
333
- (71458, 71461, 0),
334
- (71463, 71467, 0),
335
- (71727, 71735, 0),
336
- (71737, 71738, 0),
337
- (71995, 71996, 0),
338
- (71998, 71998, 0),
339
- (72003, 72003, 0),
340
- (72148, 72151, 0),
341
- (72154, 72155, 0),
342
- (72160, 72160, 0),
343
- (72193, 72202, 0),
344
- (72243, 72248, 0),
345
- (72251, 72254, 0),
346
- (72263, 72263, 0),
347
- (72273, 72278, 0),
348
- (72281, 72283, 0),
349
- (72330, 72342, 0),
350
- (72344, 72345, 0),
351
- (72752, 72758, 0),
352
- (72760, 72765, 0),
353
- (72767, 72767, 0),
354
- (72850, 72871, 0),
355
- (72874, 72880, 0),
356
- (72882, 72883, 0),
357
- (72885, 72886, 0),
358
- (73009, 73014, 0),
359
- (73018, 73018, 0),
360
- (73020, 73021, 0),
361
- (73023, 73029, 0),
362
- (73031, 73031, 0),
363
- (73104, 73105, 0),
364
- (73109, 73109, 0),
365
- (73111, 73111, 0),
366
- (73459, 73460, 0),
367
- (92912, 92916, 0),
368
- (92976, 92982, 0),
369
- (94031, 94031, 0),
370
- (94095, 94098, 0),
371
- (94176, 94179, 2),
372
- (94180, 94180, 0),
373
- (94192, 94193, 2),
374
- (94208, 100343, 2),
375
- (100352, 101589, 2),
376
- (101632, 101640, 2),
377
- (110592, 110878, 2),
378
- (110928, 110930, 2),
379
- (110948, 110951, 2),
380
- (110960, 111355, 2),
381
- (113821, 113822, 0),
382
- (119143, 119145, 0),
383
- (119163, 119170, 0),
384
- (119173, 119179, 0),
385
- (119210, 119213, 0),
386
- (119362, 119364, 0),
387
- (121344, 121398, 0),
388
- (121403, 121452, 0),
389
- (121461, 121461, 0),
390
- (121476, 121476, 0),
391
- (121499, 121503, 0),
392
- (121505, 121519, 0),
393
- (122880, 122886, 0),
394
- (122888, 122904, 0),
395
- (122907, 122913, 0),
396
- (122915, 122916, 0),
397
- (122918, 122922, 0),
398
- (123184, 123190, 0),
399
- (123628, 123631, 0),
400
- (125136, 125142, 0),
401
- (125252, 125258, 0),
402
- (126980, 126980, 2),
403
- (127183, 127183, 2),
404
- (127374, 127374, 2),
405
- (127377, 127386, 2),
406
- (127488, 127490, 2),
407
- (127504, 127547, 2),
408
- (127552, 127560, 2),
409
- (127568, 127569, 2),
410
- (127584, 127589, 2),
411
- (127744, 127776, 2),
412
- (127789, 127797, 2),
413
- (127799, 127868, 2),
414
- (127870, 127891, 2),
415
- (127904, 127946, 2),
416
- (127951, 127955, 2),
417
- (127968, 127984, 2),
418
- (127988, 127988, 2),
419
- (127992, 128062, 2),
420
- (128064, 128064, 2),
421
- (128066, 128252, 2),
422
- (128255, 128317, 2),
423
- (128331, 128334, 2),
424
- (128336, 128359, 2),
425
- (128378, 128378, 2),
426
- (128405, 128406, 2),
427
- (128420, 128420, 2),
428
- (128507, 128591, 2),
429
- (128640, 128709, 2),
430
- (128716, 128716, 2),
431
- (128720, 128722, 2),
432
- (128725, 128727, 2),
433
- (128747, 128748, 2),
434
- (128756, 128764, 2),
435
- (128992, 129003, 2),
436
- (129292, 129338, 2),
437
- (129340, 129349, 2),
438
- (129351, 129400, 2),
439
- (129402, 129483, 2),
440
- (129485, 129535, 2),
441
- (129648, 129652, 2),
442
- (129656, 129658, 2),
443
- (129664, 129670, 2),
444
- (129680, 129704, 2),
445
- (129712, 129718, 2),
446
- (129728, 129730, 2),
447
- (129744, 129750, 2),
448
- (131072, 196605, 2),
449
- (196608, 262141, 2),
450
- (917760, 917999, 0),
451
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/LIVE/thrust/thrust/detail/reference_forward_declaration.h DELETED
@@ -1,28 +0,0 @@
1
- /*
2
- * Copyright 2008-2013 NVIDIA Corporation
3
- *
4
- * Licensed under the Apache License, Version 2.0 (the "License");
5
- * you may not use this file except in compliance with the License.
6
- * You may obtain a copy of the License at
7
- *
8
- * http://www.apache.org/licenses/LICENSE-2.0
9
- *
10
- * Unless required by applicable law or agreed to in writing, software
11
- * distributed under the License is distributed on an "AS IS" BASIS,
12
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- * See the License for the specific language governing permissions and
14
- * limitations under the License.
15
- */
16
-
17
- #pragma once
18
-
19
- #include <thrust/detail/config.h>
20
- #include <thrust/detail/use_default.h>
21
-
22
- namespace thrust
23
- {
24
-
25
- template<typename Element, typename Pointer, typename Derived = use_default> class reference;
26
-
27
- } // end thrust
28
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/lama-example/saicinpainting/evaluation/losses/fid/__init__.py DELETED
File without changes
spaces/CVPR/monoscene_lite/monoscene/unet3d_kitti.py DELETED
@@ -1,88 +0,0 @@
1
- # encoding: utf-8
2
- import torch
3
- import torch.nn as nn
4
- import torch.nn.functional as F
5
- from monoscene.modules import SegmentationHead
6
- from monoscene.CRP3D import CPMegaVoxels
7
- from monoscene.modules import Process, Upsample, Downsample
8
-
9
-
10
- class UNet3D(nn.Module):
11
- def __init__(
12
- self,
13
- class_num,
14
- norm_layer,
15
- full_scene_size,
16
- feature,
17
- project_scale,
18
- context_prior=None,
19
- bn_momentum=0.1,
20
- ):
21
- super(UNet3D, self).__init__()
22
- self.business_layer = []
23
- self.project_scale = project_scale
24
- self.full_scene_size = full_scene_size
25
- self.feature = feature
26
-
27
- size_l1 = (
28
- int(self.full_scene_size[0] / project_scale),
29
- int(self.full_scene_size[1] / project_scale),
30
- int(self.full_scene_size[2] / project_scale),
31
- )
32
- size_l2 = (size_l1[0] // 2, size_l1[1] // 2, size_l1[2] // 2)
33
- size_l3 = (size_l2[0] // 2, size_l2[1] // 2, size_l2[2] // 2)
34
-
35
- dilations = [1, 2, 3]
36
- self.process_l1 = nn.Sequential(
37
- Process(self.feature, norm_layer, bn_momentum, dilations=[1, 2, 3]),
38
- Downsample(self.feature, norm_layer, bn_momentum),
39
- )
40
- self.process_l2 = nn.Sequential(
41
- Process(self.feature * 2, norm_layer, bn_momentum, dilations=[1, 2, 3]),
42
- Downsample(self.feature * 2, norm_layer, bn_momentum),
43
- )
44
-
45
- self.up_13_l2 = Upsample(
46
- self.feature * 4, self.feature * 2, norm_layer, bn_momentum
47
- )
48
- self.up_12_l1 = Upsample(
49
- self.feature * 2, self.feature, norm_layer, bn_momentum
50
- )
51
- self.up_l1_lfull = Upsample(
52
- self.feature, self.feature // 2, norm_layer, bn_momentum
53
- )
54
-
55
- self.ssc_head = SegmentationHead(
56
- self.feature // 2, self.feature // 2, class_num, dilations
57
- )
58
-
59
- self.context_prior = context_prior
60
- if context_prior:
61
- self.CP_mega_voxels = CPMegaVoxels(
62
- self.feature * 4, size_l3, bn_momentum=bn_momentum
63
- )
64
-
65
- def forward(self, input_dict):
66
- res = {}
67
-
68
- x3d_l1 = input_dict["x3d"]
69
-
70
- x3d_l2 = self.process_l1(x3d_l1)
71
-
72
- x3d_l3 = self.process_l2(x3d_l2)
73
-
74
- if self.context_prior:
75
- ret = self.CP_mega_voxels(x3d_l3)
76
- x3d_l3 = ret["x"]
77
- for k in ret.keys():
78
- res[k] = ret[k]
79
-
80
- x3d_up_l2 = self.up_13_l2(x3d_l3) + x3d_l2
81
- x3d_up_l1 = self.up_12_l1(x3d_up_l2) + x3d_l1
82
- x3d_up_lfull = self.up_l1_lfull(x3d_up_l1)
83
-
84
- ssc_logit_full = self.ssc_head(x3d_up_lfull)
85
-
86
- res["ssc_logit"] = ssc_logit_full
87
-
88
- return res
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/regionclip-demo/detectron2/data/transforms/build.py DELETED
@@ -1,89 +0,0 @@
1
- # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
2
- # from . import transforms as T
3
- import torchvision.transforms as T
4
- from PIL import Image
5
- from timm.data import create_transform
6
- from .torchvision_transforms.transforms import Resize as New_Resize
7
-
8
- def build_clip_transforms(cfg, is_train=True):
9
- if cfg.AUG.USE_TIMM and is_train:
10
- print('=> use timm transform for training')
11
- timm_cfg = cfg.AUG.TIMM_AUG
12
- transforms = create_transform(
13
- input_size=cfg.TRAIN.IMAGE_SIZE[0],
14
- is_training=True,
15
- use_prefetcher=False,
16
- no_aug=False,
17
- re_prob=timm_cfg.RE_PROB,
18
- re_mode=timm_cfg.RE_MODE,
19
- re_count=timm_cfg.RE_COUNT,
20
- scale=cfg.AUG.SCALE,
21
- ratio=cfg.AUG.RATIO,
22
- hflip=timm_cfg.HFLIP,
23
- vflip=timm_cfg.VFLIP,
24
- color_jitter=timm_cfg.COLOR_JITTER,
25
- auto_augment=timm_cfg.AUTO_AUGMENT,
26
- interpolation=timm_cfg.INTERPOLATION,
27
- mean=cfg.MODEL.PIXEL_MEAN,
28
- std=cfg.MODEL.PIXEL_STD,
29
- )
30
-
31
- return transforms
32
-
33
- # normalize_transform = T.Normalize((0.48145466, 0.4578275, 0.40821073), (0.26862954, 0.26130258, 0.27577711))
34
- # assert isinstance(cfg.DATASET.OUTPUT_SIZE, (list, tuple)), 'DATASET.OUTPUT_SIZE should be list or tuple'
35
- # NOTE: normalization is applied in rcnn.py, to keep consistent as Detectron2
36
- # normalize = T.Normalize(mean=cfg.MODEL.PIXEL_MEAN, std=cfg.MODEL.PIXEL_STD) # T.Normalize(mean=cfg.INPUT.PIXEL_MEAN, std=cfg.INPUT.PIXEL_STD)
37
-
38
- transforms = None
39
- if is_train:
40
- aug = cfg.AUG
41
- scale = aug.SCALE
42
- ratio = aug.RATIO
43
- if len(cfg.AUG.TRAIN.IMAGE_SIZE) == 2: # Data Augmentation from MSR-CLIP
44
- ts = [
45
- T.RandomResizedCrop(
46
- cfg.AUG.TRAIN.IMAGE_SIZE[0], scale=scale, ratio=ratio,
47
- interpolation=cfg.AUG.INTERPOLATION
48
- ),
49
- T.RandomHorizontalFlip(),
50
- ]
51
- elif len(cfg.AUG.TRAIN.IMAGE_SIZE) == 1 and cfg.AUG.TRAIN.MAX_SIZE is not None: # designed for pretraining fastrcnn
52
- ts = [
53
- New_Resize(
54
- cfg.AUG.TRAIN.IMAGE_SIZE[0], max_size=cfg.AUG.TRAIN.MAX_SIZE,
55
- interpolation=cfg.AUG.INTERPOLATION
56
- ),
57
- T.RandomHorizontalFlip(),
58
- ]
59
-
60
- cj = aug.COLOR_JITTER
61
- if cj[-1] > 0.0:
62
- ts.append(T.RandomApply([T.ColorJitter(*cj[:-1])], p=cj[-1]))
63
-
64
- gs = aug.GRAY_SCALE
65
- if gs > 0.0:
66
- ts.append(T.RandomGrayscale(gs))
67
-
68
- gb = aug.GAUSSIAN_BLUR
69
- if gb > 0.0:
70
- ts.append(T.RandomApply([GaussianBlur([.1, 2.])], p=gb))
71
-
72
- ts.append(T.ToTensor())
73
- # NOTE: normalization is applied in rcnn.py, to keep consistent as Detectron2
74
- #ts.append(normalize)
75
-
76
- transforms = T.Compose(ts)
77
- else:
78
- # for zeroshot inference of grounding evaluation
79
- transforms = T.Compose([
80
- T.Resize(
81
- cfg.AUG.TEST.IMAGE_SIZE[0],
82
- interpolation=cfg.AUG.TEST.INTERPOLATION
83
- ),
84
- T.ToTensor(),
85
- ])
86
- return transforms
87
-
88
- return transforms
89
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/ChenWu98/Stable-CycleDiffusion/utils.py DELETED
@@ -1,6 +0,0 @@
1
- def is_google_colab():
2
- try:
3
- import google.colab
4
- return True
5
- except:
6
- return False
 
 
 
 
 
 
 
spaces/CikeyQI/meme-api/meme_generator/memes/klee_eat/__init__.py DELETED
@@ -1,33 +0,0 @@
1
- from pathlib import Path
2
- from typing import List
3
-
4
- from PIL.Image import Image as IMG
5
- from pil_utils import BuildImage
6
-
7
- from meme_generator import add_meme
8
- from meme_generator.utils import save_gif
9
-
10
- img_dir = Path(__file__).parent / "images"
11
-
12
-
13
- def klee_eat(images: List[BuildImage], texts, args):
14
- img = images[0].convert("RGBA").square().resize((83, 83))
15
- # fmt: off
16
- locs = [
17
- (0, 174), (0, 174), (0, 174), (0, 174), (0, 174),
18
- (12, 160), (19, 152), (23, 148), (26, 145), (32, 140),
19
- (37, 136), (42, 131), (49, 127), (70, 126), (88, 128),
20
- (-30, 210), (-19, 207), (-14, 200), (-10, 188), (-7, 179),
21
- (-3, 170), (-3, 175), (-1, 174), (0, 174), (0, 174),
22
- (0, 174), (0, 174), (0, 174), (0, 174), (0, 174), (0, 174)
23
- ]
24
- # fmt: on
25
- frames: List[IMG] = []
26
- for i in range(31):
27
- frame = BuildImage.open(img_dir / f"{i}.png")
28
- frame.paste(img, locs[i], below=True)
29
- frames.append(frame.image)
30
- return save_gif(frames, 0.1)
31
-
32
-
33
- add_meme("klee_eat", klee_eat, min_images=1, max_images=1, keywords=["可莉吃"])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Cong723/gpt-academic-public/crazy_functions/test_project/cpp/cppipc/policy.h DELETED
@@ -1,25 +0,0 @@
1
- #pragma once
2
-
3
- #include <type_traits>
4
-
5
- #include "libipc/def.h"
6
- #include "libipc/prod_cons.h"
7
-
8
- #include "libipc/circ/elem_array.h"
9
-
10
- namespace ipc {
11
- namespace policy {
12
-
13
- template <template <typename, std::size_t...> class Elems, typename Flag>
14
- struct choose;
15
-
16
- template <typename Flag>
17
- struct choose<circ::elem_array, Flag> {
18
- using flag_t = Flag;
19
-
20
- template <std::size_t DataSize, std::size_t AlignSize>
21
- using elems_t = circ::elem_array<ipc::prod_cons_impl<flag_t>, DataSize, AlignSize>;
22
- };
23
-
24
- } // namespace policy
25
- } // namespace ipc
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DJQmUKV/rvc-inference/infer_pack/attentions.py DELETED
@@ -1,417 +0,0 @@
1
- import copy
2
- import math
3
- import numpy as np
4
- import torch
5
- from torch import nn
6
- from torch.nn import functional as F
7
-
8
- from infer_pack import commons
9
- from infer_pack import modules
10
- from infer_pack.modules import LayerNorm
11
-
12
-
13
- class Encoder(nn.Module):
14
- def __init__(
15
- self,
16
- hidden_channels,
17
- filter_channels,
18
- n_heads,
19
- n_layers,
20
- kernel_size=1,
21
- p_dropout=0.0,
22
- window_size=10,
23
- **kwargs
24
- ):
25
- super().__init__()
26
- self.hidden_channels = hidden_channels
27
- self.filter_channels = filter_channels
28
- self.n_heads = n_heads
29
- self.n_layers = n_layers
30
- self.kernel_size = kernel_size
31
- self.p_dropout = p_dropout
32
- self.window_size = window_size
33
-
34
- self.drop = nn.Dropout(p_dropout)
35
- self.attn_layers = nn.ModuleList()
36
- self.norm_layers_1 = nn.ModuleList()
37
- self.ffn_layers = nn.ModuleList()
38
- self.norm_layers_2 = nn.ModuleList()
39
- for i in range(self.n_layers):
40
- self.attn_layers.append(
41
- MultiHeadAttention(
42
- hidden_channels,
43
- hidden_channels,
44
- n_heads,
45
- p_dropout=p_dropout,
46
- window_size=window_size,
47
- )
48
- )
49
- self.norm_layers_1.append(LayerNorm(hidden_channels))
50
- self.ffn_layers.append(
51
- FFN(
52
- hidden_channels,
53
- hidden_channels,
54
- filter_channels,
55
- kernel_size,
56
- p_dropout=p_dropout,
57
- )
58
- )
59
- self.norm_layers_2.append(LayerNorm(hidden_channels))
60
-
61
- def forward(self, x, x_mask):
62
- attn_mask = x_mask.unsqueeze(2) * x_mask.unsqueeze(-1)
63
- x = x * x_mask
64
- for i in range(self.n_layers):
65
- y = self.attn_layers[i](x, x, attn_mask)
66
- y = self.drop(y)
67
- x = self.norm_layers_1[i](x + y)
68
-
69
- y = self.ffn_layers[i](x, x_mask)
70
- y = self.drop(y)
71
- x = self.norm_layers_2[i](x + y)
72
- x = x * x_mask
73
- return x
74
-
75
-
76
- class Decoder(nn.Module):
77
- def __init__(
78
- self,
79
- hidden_channels,
80
- filter_channels,
81
- n_heads,
82
- n_layers,
83
- kernel_size=1,
84
- p_dropout=0.0,
85
- proximal_bias=False,
86
- proximal_init=True,
87
- **kwargs
88
- ):
89
- super().__init__()
90
- self.hidden_channels = hidden_channels
91
- self.filter_channels = filter_channels
92
- self.n_heads = n_heads
93
- self.n_layers = n_layers
94
- self.kernel_size = kernel_size
95
- self.p_dropout = p_dropout
96
- self.proximal_bias = proximal_bias
97
- self.proximal_init = proximal_init
98
-
99
- self.drop = nn.Dropout(p_dropout)
100
- self.self_attn_layers = nn.ModuleList()
101
- self.norm_layers_0 = nn.ModuleList()
102
- self.encdec_attn_layers = nn.ModuleList()
103
- self.norm_layers_1 = nn.ModuleList()
104
- self.ffn_layers = nn.ModuleList()
105
- self.norm_layers_2 = nn.ModuleList()
106
- for i in range(self.n_layers):
107
- self.self_attn_layers.append(
108
- MultiHeadAttention(
109
- hidden_channels,
110
- hidden_channels,
111
- n_heads,
112
- p_dropout=p_dropout,
113
- proximal_bias=proximal_bias,
114
- proximal_init=proximal_init,
115
- )
116
- )
117
- self.norm_layers_0.append(LayerNorm(hidden_channels))
118
- self.encdec_attn_layers.append(
119
- MultiHeadAttention(
120
- hidden_channels, hidden_channels, n_heads, p_dropout=p_dropout
121
- )
122
- )
123
- self.norm_layers_1.append(LayerNorm(hidden_channels))
124
- self.ffn_layers.append(
125
- FFN(
126
- hidden_channels,
127
- hidden_channels,
128
- filter_channels,
129
- kernel_size,
130
- p_dropout=p_dropout,
131
- causal=True,
132
- )
133
- )
134
- self.norm_layers_2.append(LayerNorm(hidden_channels))
135
-
136
- def forward(self, x, x_mask, h, h_mask):
137
- """
138
- x: decoder input
139
- h: encoder output
140
- """
141
- self_attn_mask = commons.subsequent_mask(x_mask.size(2)).to(
142
- device=x.device, dtype=x.dtype
143
- )
144
- encdec_attn_mask = h_mask.unsqueeze(2) * x_mask.unsqueeze(-1)
145
- x = x * x_mask
146
- for i in range(self.n_layers):
147
- y = self.self_attn_layers[i](x, x, self_attn_mask)
148
- y = self.drop(y)
149
- x = self.norm_layers_0[i](x + y)
150
-
151
- y = self.encdec_attn_layers[i](x, h, encdec_attn_mask)
152
- y = self.drop(y)
153
- x = self.norm_layers_1[i](x + y)
154
-
155
- y = self.ffn_layers[i](x, x_mask)
156
- y = self.drop(y)
157
- x = self.norm_layers_2[i](x + y)
158
- x = x * x_mask
159
- return x
160
-
161
-
162
- class MultiHeadAttention(nn.Module):
163
- def __init__(
164
- self,
165
- channels,
166
- out_channels,
167
- n_heads,
168
- p_dropout=0.0,
169
- window_size=None,
170
- heads_share=True,
171
- block_length=None,
172
- proximal_bias=False,
173
- proximal_init=False,
174
- ):
175
- super().__init__()
176
- assert channels % n_heads == 0
177
-
178
- self.channels = channels
179
- self.out_channels = out_channels
180
- self.n_heads = n_heads
181
- self.p_dropout = p_dropout
182
- self.window_size = window_size
183
- self.heads_share = heads_share
184
- self.block_length = block_length
185
- self.proximal_bias = proximal_bias
186
- self.proximal_init = proximal_init
187
- self.attn = None
188
-
189
- self.k_channels = channels // n_heads
190
- self.conv_q = nn.Conv1d(channels, channels, 1)
191
- self.conv_k = nn.Conv1d(channels, channels, 1)
192
- self.conv_v = nn.Conv1d(channels, channels, 1)
193
- self.conv_o = nn.Conv1d(channels, out_channels, 1)
194
- self.drop = nn.Dropout(p_dropout)
195
-
196
- if window_size is not None:
197
- n_heads_rel = 1 if heads_share else n_heads
198
- rel_stddev = self.k_channels**-0.5
199
- self.emb_rel_k = nn.Parameter(
200
- torch.randn(n_heads_rel, window_size * 2 + 1, self.k_channels)
201
- * rel_stddev
202
- )
203
- self.emb_rel_v = nn.Parameter(
204
- torch.randn(n_heads_rel, window_size * 2 + 1, self.k_channels)
205
- * rel_stddev
206
- )
207
-
208
- nn.init.xavier_uniform_(self.conv_q.weight)
209
- nn.init.xavier_uniform_(self.conv_k.weight)
210
- nn.init.xavier_uniform_(self.conv_v.weight)
211
- if proximal_init:
212
- with torch.no_grad():
213
- self.conv_k.weight.copy_(self.conv_q.weight)
214
- self.conv_k.bias.copy_(self.conv_q.bias)
215
-
216
- def forward(self, x, c, attn_mask=None):
217
- q = self.conv_q(x)
218
- k = self.conv_k(c)
219
- v = self.conv_v(c)
220
-
221
- x, self.attn = self.attention(q, k, v, mask=attn_mask)
222
-
223
- x = self.conv_o(x)
224
- return x
225
-
226
- def attention(self, query, key, value, mask=None):
227
- # reshape [b, d, t] -> [b, n_h, t, d_k]
228
- b, d, t_s, t_t = (*key.size(), query.size(2))
229
- query = query.view(b, self.n_heads, self.k_channels, t_t).transpose(2, 3)
230
- key = key.view(b, self.n_heads, self.k_channels, t_s).transpose(2, 3)
231
- value = value.view(b, self.n_heads, self.k_channels, t_s).transpose(2, 3)
232
-
233
- scores = torch.matmul(query / math.sqrt(self.k_channels), key.transpose(-2, -1))
234
- if self.window_size is not None:
235
- assert (
236
- t_s == t_t
237
- ), "Relative attention is only available for self-attention."
238
- key_relative_embeddings = self._get_relative_embeddings(self.emb_rel_k, t_s)
239
- rel_logits = self._matmul_with_relative_keys(
240
- query / math.sqrt(self.k_channels), key_relative_embeddings
241
- )
242
- scores_local = self._relative_position_to_absolute_position(rel_logits)
243
- scores = scores + scores_local
244
- if self.proximal_bias:
245
- assert t_s == t_t, "Proximal bias is only available for self-attention."
246
- scores = scores + self._attention_bias_proximal(t_s).to(
247
- device=scores.device, dtype=scores.dtype
248
- )
249
- if mask is not None:
250
- scores = scores.masked_fill(mask == 0, -1e4)
251
- if self.block_length is not None:
252
- assert (
253
- t_s == t_t
254
- ), "Local attention is only available for self-attention."
255
- block_mask = (
256
- torch.ones_like(scores)
257
- .triu(-self.block_length)
258
- .tril(self.block_length)
259
- )
260
- scores = scores.masked_fill(block_mask == 0, -1e4)
261
- p_attn = F.softmax(scores, dim=-1) # [b, n_h, t_t, t_s]
262
- p_attn = self.drop(p_attn)
263
- output = torch.matmul(p_attn, value)
264
- if self.window_size is not None:
265
- relative_weights = self._absolute_position_to_relative_position(p_attn)
266
- value_relative_embeddings = self._get_relative_embeddings(
267
- self.emb_rel_v, t_s
268
- )
269
- output = output + self._matmul_with_relative_values(
270
- relative_weights, value_relative_embeddings
271
- )
272
- output = (
273
- output.transpose(2, 3).contiguous().view(b, d, t_t)
274
- ) # [b, n_h, t_t, d_k] -> [b, d, t_t]
275
- return output, p_attn
276
-
277
- def _matmul_with_relative_values(self, x, y):
278
- """
279
- x: [b, h, l, m]
280
- y: [h or 1, m, d]
281
- ret: [b, h, l, d]
282
- """
283
- ret = torch.matmul(x, y.unsqueeze(0))
284
- return ret
285
-
286
- def _matmul_with_relative_keys(self, x, y):
287
- """
288
- x: [b, h, l, d]
289
- y: [h or 1, m, d]
290
- ret: [b, h, l, m]
291
- """
292
- ret = torch.matmul(x, y.unsqueeze(0).transpose(-2, -1))
293
- return ret
294
-
295
- def _get_relative_embeddings(self, relative_embeddings, length):
296
- max_relative_position = 2 * self.window_size + 1
297
- # Pad first before slice to avoid using cond ops.
298
- pad_length = max(length - (self.window_size + 1), 0)
299
- slice_start_position = max((self.window_size + 1) - length, 0)
300
- slice_end_position = slice_start_position + 2 * length - 1
301
- if pad_length > 0:
302
- padded_relative_embeddings = F.pad(
303
- relative_embeddings,
304
- commons.convert_pad_shape([[0, 0], [pad_length, pad_length], [0, 0]]),
305
- )
306
- else:
307
- padded_relative_embeddings = relative_embeddings
308
- used_relative_embeddings = padded_relative_embeddings[
309
- :, slice_start_position:slice_end_position
310
- ]
311
- return used_relative_embeddings
312
-
313
- def _relative_position_to_absolute_position(self, x):
314
- """
315
- x: [b, h, l, 2*l-1]
316
- ret: [b, h, l, l]
317
- """
318
- batch, heads, length, _ = x.size()
319
- # Concat columns of pad to shift from relative to absolute indexing.
320
- x = F.pad(x, commons.convert_pad_shape([[0, 0], [0, 0], [0, 0], [0, 1]]))
321
-
322
- # Concat extra elements so to add up to shape (len+1, 2*len-1).
323
- x_flat = x.view([batch, heads, length * 2 * length])
324
- x_flat = F.pad(
325
- x_flat, commons.convert_pad_shape([[0, 0], [0, 0], [0, length - 1]])
326
- )
327
-
328
- # Reshape and slice out the padded elements.
329
- x_final = x_flat.view([batch, heads, length + 1, 2 * length - 1])[
330
- :, :, :length, length - 1 :
331
- ]
332
- return x_final
333
-
334
- def _absolute_position_to_relative_position(self, x):
335
- """
336
- x: [b, h, l, l]
337
- ret: [b, h, l, 2*l-1]
338
- """
339
- batch, heads, length, _ = x.size()
340
- # padd along column
341
- x = F.pad(
342
- x, commons.convert_pad_shape([[0, 0], [0, 0], [0, 0], [0, length - 1]])
343
- )
344
- x_flat = x.view([batch, heads, length**2 + length * (length - 1)])
345
- # add 0's in the beginning that will skew the elements after reshape
346
- x_flat = F.pad(x_flat, commons.convert_pad_shape([[0, 0], [0, 0], [length, 0]]))
347
- x_final = x_flat.view([batch, heads, length, 2 * length])[:, :, :, 1:]
348
- return x_final
349
-
350
- def _attention_bias_proximal(self, length):
351
- """Bias for self-attention to encourage attention to close positions.
352
- Args:
353
- length: an integer scalar.
354
- Returns:
355
- a Tensor with shape [1, 1, length, length]
356
- """
357
- r = torch.arange(length, dtype=torch.float32)
358
- diff = torch.unsqueeze(r, 0) - torch.unsqueeze(r, 1)
359
- return torch.unsqueeze(torch.unsqueeze(-torch.log1p(torch.abs(diff)), 0), 0)
360
-
361
-
362
- class FFN(nn.Module):
363
- def __init__(
364
- self,
365
- in_channels,
366
- out_channels,
367
- filter_channels,
368
- kernel_size,
369
- p_dropout=0.0,
370
- activation=None,
371
- causal=False,
372
- ):
373
- super().__init__()
374
- self.in_channels = in_channels
375
- self.out_channels = out_channels
376
- self.filter_channels = filter_channels
377
- self.kernel_size = kernel_size
378
- self.p_dropout = p_dropout
379
- self.activation = activation
380
- self.causal = causal
381
-
382
- if causal:
383
- self.padding = self._causal_padding
384
- else:
385
- self.padding = self._same_padding
386
-
387
- self.conv_1 = nn.Conv1d(in_channels, filter_channels, kernel_size)
388
- self.conv_2 = nn.Conv1d(filter_channels, out_channels, kernel_size)
389
- self.drop = nn.Dropout(p_dropout)
390
-
391
- def forward(self, x, x_mask):
392
- x = self.conv_1(self.padding(x * x_mask))
393
- if self.activation == "gelu":
394
- x = x * torch.sigmoid(1.702 * x)
395
- else:
396
- x = torch.relu(x)
397
- x = self.drop(x)
398
- x = self.conv_2(self.padding(x * x_mask))
399
- return x * x_mask
400
-
401
- def _causal_padding(self, x):
402
- if self.kernel_size == 1:
403
- return x
404
- pad_l = self.kernel_size - 1
405
- pad_r = 0
406
- padding = [[0, 0], [0, 0], [pad_l, pad_r]]
407
- x = F.pad(x, commons.convert_pad_shape(padding))
408
- return x
409
-
410
- def _same_padding(self, x):
411
- if self.kernel_size == 1:
412
- return x
413
- pad_l = (self.kernel_size - 1) // 2
414
- pad_r = self.kernel_size // 2
415
- padding = [[0, 0], [0, 0], [pad_l, pad_r]]
416
- x = F.pad(x, commons.convert_pad_shape(padding))
417
- return x
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/ImageEnhance.py DELETED
@@ -1,103 +0,0 @@
1
- #
2
- # The Python Imaging Library.
3
- # $Id$
4
- #
5
- # image enhancement classes
6
- #
7
- # For a background, see "Image Processing By Interpolation and
8
- # Extrapolation", Paul Haeberli and Douglas Voorhies. Available
9
- # at http://www.graficaobscura.com/interp/index.html
10
- #
11
- # History:
12
- # 1996-03-23 fl Created
13
- # 2009-06-16 fl Fixed mean calculation
14
- #
15
- # Copyright (c) Secret Labs AB 1997.
16
- # Copyright (c) Fredrik Lundh 1996.
17
- #
18
- # See the README file for information on usage and redistribution.
19
- #
20
-
21
- from . import Image, ImageFilter, ImageStat
22
-
23
-
24
- class _Enhance:
25
- def enhance(self, factor):
26
- """
27
- Returns an enhanced image.
28
-
29
- :param factor: A floating point value controlling the enhancement.
30
- Factor 1.0 always returns a copy of the original image,
31
- lower factors mean less color (brightness, contrast,
32
- etc), and higher values more. There are no restrictions
33
- on this value.
34
- :rtype: :py:class:`~PIL.Image.Image`
35
- """
36
- return Image.blend(self.degenerate, self.image, factor)
37
-
38
-
39
- class Color(_Enhance):
40
- """Adjust image color balance.
41
-
42
- This class can be used to adjust the colour balance of an image, in
43
- a manner similar to the controls on a colour TV set. An enhancement
44
- factor of 0.0 gives a black and white image. A factor of 1.0 gives
45
- the original image.
46
- """
47
-
48
- def __init__(self, image):
49
- self.image = image
50
- self.intermediate_mode = "L"
51
- if "A" in image.getbands():
52
- self.intermediate_mode = "LA"
53
-
54
- self.degenerate = image.convert(self.intermediate_mode).convert(image.mode)
55
-
56
-
57
- class Contrast(_Enhance):
58
- """Adjust image contrast.
59
-
60
- This class can be used to control the contrast of an image, similar
61
- to the contrast control on a TV set. An enhancement factor of 0.0
62
- gives a solid grey image. A factor of 1.0 gives the original image.
63
- """
64
-
65
- def __init__(self, image):
66
- self.image = image
67
- mean = int(ImageStat.Stat(image.convert("L")).mean[0] + 0.5)
68
- self.degenerate = Image.new("L", image.size, mean).convert(image.mode)
69
-
70
- if "A" in image.getbands():
71
- self.degenerate.putalpha(image.getchannel("A"))
72
-
73
-
74
- class Brightness(_Enhance):
75
- """Adjust image brightness.
76
-
77
- This class can be used to control the brightness of an image. An
78
- enhancement factor of 0.0 gives a black image. A factor of 1.0 gives the
79
- original image.
80
- """
81
-
82
- def __init__(self, image):
83
- self.image = image
84
- self.degenerate = Image.new(image.mode, image.size, 0)
85
-
86
- if "A" in image.getbands():
87
- self.degenerate.putalpha(image.getchannel("A"))
88
-
89
-
90
- class Sharpness(_Enhance):
91
- """Adjust image sharpness.
92
-
93
- This class can be used to adjust the sharpness of an image. An
94
- enhancement factor of 0.0 gives a blurred image, a factor of 1.0 gives the
95
- original image, and a factor of 2.0 gives a sharpened image.
96
- """
97
-
98
- def __init__(self, image):
99
- self.image = image
100
- self.degenerate = image.filter(ImageFilter.SMOOTH)
101
-
102
- if "A" in image.getbands():
103
- self.degenerate.putalpha(image.getchannel("A"))
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/dotenv/version.py DELETED
@@ -1 +0,0 @@
1
- __version__ = "1.0.0"
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/fontTools/ttLib/tables/S_V_G_.py DELETED
@@ -1,215 +0,0 @@
1
- """Compiles/decompiles SVG table.
2
-
3
- https://docs.microsoft.com/en-us/typography/opentype/spec/svg
4
-
5
- The XML format is:
6
-
7
- .. code-block:: xml
8
-
9
- <SVG>
10
- <svgDoc endGlyphID="1" startGlyphID="1">
11
- <![CDATA[ <complete SVG doc> ]]
12
- </svgDoc>
13
- ...
14
- <svgDoc endGlyphID="n" startGlyphID="m">
15
- <![CDATA[ <complete SVG doc> ]]
16
- </svgDoc>
17
- </SVG>
18
- """
19
-
20
- from fontTools.misc.textTools import bytesjoin, safeEval, strjoin, tobytes, tostr
21
- from fontTools.misc import sstruct
22
- from . import DefaultTable
23
- from collections.abc import Sequence
24
- from dataclasses import dataclass, astuple
25
- from io import BytesIO
26
- import struct
27
- import logging
28
-
29
-
30
- log = logging.getLogger(__name__)
31
-
32
-
33
- SVG_format_0 = """
34
- > # big endian
35
- version: H
36
- offsetToSVGDocIndex: L
37
- reserved: L
38
- """
39
-
40
- SVG_format_0Size = sstruct.calcsize(SVG_format_0)
41
-
42
- doc_index_entry_format_0 = """
43
- > # big endian
44
- startGlyphID: H
45
- endGlyphID: H
46
- svgDocOffset: L
47
- svgDocLength: L
48
- """
49
-
50
- doc_index_entry_format_0Size = sstruct.calcsize(doc_index_entry_format_0)
51
-
52
-
53
- class table_S_V_G_(DefaultTable.DefaultTable):
54
- def decompile(self, data, ttFont):
55
- self.docList = []
56
- # Version 0 is the standardized version of the table; and current.
57
- # https://www.microsoft.com/typography/otspec/svg.htm
58
- sstruct.unpack(SVG_format_0, data[:SVG_format_0Size], self)
59
- if self.version != 0:
60
- log.warning(
61
- "Unknown SVG table version '%s'. Decompiling as version 0.",
62
- self.version,
63
- )
64
- # read in SVG Documents Index
65
- # data starts with the first entry of the entry list.
66
- pos = subTableStart = self.offsetToSVGDocIndex
67
- self.numEntries = struct.unpack(">H", data[pos : pos + 2])[0]
68
- pos += 2
69
- if self.numEntries > 0:
70
- data2 = data[pos:]
71
- entries = []
72
- for i in range(self.numEntries):
73
- record_data = data2[
74
- i
75
- * doc_index_entry_format_0Size : (i + 1)
76
- * doc_index_entry_format_0Size
77
- ]
78
- docIndexEntry = sstruct.unpack(
79
- doc_index_entry_format_0, record_data, DocumentIndexEntry()
80
- )
81
- entries.append(docIndexEntry)
82
-
83
- for entry in entries:
84
- start = entry.svgDocOffset + subTableStart
85
- end = start + entry.svgDocLength
86
- doc = data[start:end]
87
- compressed = False
88
- if doc.startswith(b"\x1f\x8b"):
89
- import gzip
90
-
91
- bytesIO = BytesIO(doc)
92
- with gzip.GzipFile(None, "r", fileobj=bytesIO) as gunzipper:
93
- doc = gunzipper.read()
94
- del bytesIO
95
- compressed = True
96
- doc = tostr(doc, "utf_8")
97
- self.docList.append(
98
- SVGDocument(doc, entry.startGlyphID, entry.endGlyphID, compressed)
99
- )
100
-
101
- def compile(self, ttFont):
102
- version = 0
103
- offsetToSVGDocIndex = (
104
- SVG_format_0Size # I start the SVGDocIndex right after the header.
105
- )
106
- # get SGVDoc info.
107
- docList = []
108
- entryList = []
109
- numEntries = len(self.docList)
110
- datum = struct.pack(">H", numEntries)
111
- entryList.append(datum)
112
- curOffset = len(datum) + doc_index_entry_format_0Size * numEntries
113
- seenDocs = {}
114
- allCompressed = getattr(self, "compressed", False)
115
- for i, doc in enumerate(self.docList):
116
- if isinstance(doc, (list, tuple)):
117
- doc = SVGDocument(*doc)
118
- self.docList[i] = doc
119
- docBytes = tobytes(doc.data, encoding="utf_8")
120
- if (allCompressed or doc.compressed) and not docBytes.startswith(
121
- b"\x1f\x8b"
122
- ):
123
- import gzip
124
-
125
- bytesIO = BytesIO()
126
- # mtime=0 strips the useless timestamp and makes gzip output reproducible;
127
- # equivalent to `gzip -n`
128
- with gzip.GzipFile(None, "w", fileobj=bytesIO, mtime=0) as gzipper:
129
- gzipper.write(docBytes)
130
- gzipped = bytesIO.getvalue()
131
- if len(gzipped) < len(docBytes):
132
- docBytes = gzipped
133
- del gzipped, bytesIO
134
- docLength = len(docBytes)
135
- if docBytes in seenDocs:
136
- docOffset = seenDocs[docBytes]
137
- else:
138
- docOffset = curOffset
139
- curOffset += docLength
140
- seenDocs[docBytes] = docOffset
141
- docList.append(docBytes)
142
- entry = struct.pack(
143
- ">HHLL", doc.startGlyphID, doc.endGlyphID, docOffset, docLength
144
- )
145
- entryList.append(entry)
146
- entryList.extend(docList)
147
- svgDocData = bytesjoin(entryList)
148
-
149
- reserved = 0
150
- header = struct.pack(">HLL", version, offsetToSVGDocIndex, reserved)
151
- data = [header, svgDocData]
152
- data = bytesjoin(data)
153
- return data
154
-
155
- def toXML(self, writer, ttFont):
156
- for i, doc in enumerate(self.docList):
157
- if isinstance(doc, (list, tuple)):
158
- doc = SVGDocument(*doc)
159
- self.docList[i] = doc
160
- attrs = {"startGlyphID": doc.startGlyphID, "endGlyphID": doc.endGlyphID}
161
- if doc.compressed:
162
- attrs["compressed"] = 1
163
- writer.begintag("svgDoc", **attrs)
164
- writer.newline()
165
- writer.writecdata(doc.data)
166
- writer.newline()
167
- writer.endtag("svgDoc")
168
- writer.newline()
169
-
170
- def fromXML(self, name, attrs, content, ttFont):
171
- if name == "svgDoc":
172
- if not hasattr(self, "docList"):
173
- self.docList = []
174
- doc = strjoin(content)
175
- doc = doc.strip()
176
- startGID = int(attrs["startGlyphID"])
177
- endGID = int(attrs["endGlyphID"])
178
- compressed = bool(safeEval(attrs.get("compressed", "0")))
179
- self.docList.append(SVGDocument(doc, startGID, endGID, compressed))
180
- else:
181
- log.warning("Unknown %s %s", name, content)
182
-
183
-
184
- class DocumentIndexEntry(object):
185
- def __init__(self):
186
- self.startGlyphID = None # USHORT
187
- self.endGlyphID = None # USHORT
188
- self.svgDocOffset = None # ULONG
189
- self.svgDocLength = None # ULONG
190
-
191
- def __repr__(self):
192
- return (
193
- "startGlyphID: %s, endGlyphID: %s, svgDocOffset: %s, svgDocLength: %s"
194
- % (self.startGlyphID, self.endGlyphID, self.svgDocOffset, self.svgDocLength)
195
- )
196
-
197
-
198
- @dataclass
199
- class SVGDocument(Sequence):
200
- data: str
201
- startGlyphID: int
202
- endGlyphID: int
203
- compressed: bool = False
204
-
205
- # Previously, the SVG table's docList attribute contained a lists of 3 items:
206
- # [doc, startGlyphID, endGlyphID]; later, we added a `compressed` attribute.
207
- # For backward compatibility with code that depends of them being sequences of
208
- # fixed length=3, we subclass the Sequence abstract base class and pretend only
209
- # the first three items are present. 'compressed' is only accessible via named
210
- # attribute lookup like regular dataclasses: i.e. `doc.compressed`, not `doc[3]`
211
- def __getitem__(self, index):
212
- return astuple(self)[:3][index]
213
-
214
- def __len__(self):
215
- return 3
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/fsspec/asyn.py DELETED
@@ -1,1029 +0,0 @@
1
- import asyncio
2
- import asyncio.events
3
- import functools
4
- import inspect
5
- import io
6
- import numbers
7
- import os
8
- import re
9
- import threading
10
- from contextlib import contextmanager
11
- from glob import has_magic
12
- from typing import TYPE_CHECKING, Iterable
13
-
14
- from .callbacks import _DEFAULT_CALLBACK
15
- from .exceptions import FSTimeoutError
16
- from .implementations.local import (
17
- LocalFileSystem,
18
- make_path_posix,
19
- trailing_sep,
20
- trailing_sep_maybe_asterisk,
21
- )
22
- from .spec import AbstractBufferedFile, AbstractFileSystem
23
- from .utils import is_exception, other_paths
24
-
25
- private = re.compile("_[^_]")
26
- iothread = [None] # dedicated fsspec IO thread
27
- loop = [None] # global event loop for any non-async instance
28
- _lock = None # global lock placeholder
29
- get_running_loop = asyncio.get_running_loop
30
-
31
-
32
- def get_lock():
33
- """Allocate or return a threading lock.
34
-
35
- The lock is allocated on first use to allow setting one lock per forked process.
36
- """
37
- global _lock
38
- if not _lock:
39
- _lock = threading.Lock()
40
- return _lock
41
-
42
-
43
- def reset_lock():
44
- """Reset the global lock.
45
-
46
- This should be called only on the init of a forked process to reset the lock to
47
- None, enabling the new forked process to get a new lock.
48
- """
49
- global _lock
50
-
51
- iothread[0] = None
52
- loop[0] = None
53
- _lock = None
54
-
55
-
56
- async def _runner(event, coro, result, timeout=None):
57
- timeout = timeout if timeout else None # convert 0 or 0.0 to None
58
- if timeout is not None:
59
- coro = asyncio.wait_for(coro, timeout=timeout)
60
- try:
61
- result[0] = await coro
62
- except Exception as ex:
63
- result[0] = ex
64
- finally:
65
- event.set()
66
-
67
-
68
- def sync(loop, func, *args, timeout=None, **kwargs):
69
- """
70
- Make loop run coroutine until it returns. Runs in other thread
71
-
72
- Examples
73
- --------
74
- >>> fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args,
75
- timeout=timeout, **kwargs)
76
- """
77
- timeout = timeout if timeout else None # convert 0 or 0.0 to None
78
- # NB: if the loop is not running *yet*, it is OK to submit work
79
- # and we will wait for it
80
- if loop is None or loop.is_closed():
81
- raise RuntimeError("Loop is not running")
82
- try:
83
- loop0 = asyncio.events.get_running_loop()
84
- if loop0 is loop:
85
- raise NotImplementedError("Calling sync() from within a running loop")
86
- except RuntimeError:
87
- pass
88
- coro = func(*args, **kwargs)
89
- result = [None]
90
- event = threading.Event()
91
- asyncio.run_coroutine_threadsafe(_runner(event, coro, result, timeout), loop)
92
- while True:
93
- # this loops allows thread to get interrupted
94
- if event.wait(1):
95
- break
96
- if timeout is not None:
97
- timeout -= 1
98
- if timeout < 0:
99
- raise FSTimeoutError
100
-
101
- return_result = result[0]
102
- if isinstance(return_result, asyncio.TimeoutError):
103
- # suppress asyncio.TimeoutError, raise FSTimeoutError
104
- raise FSTimeoutError from return_result
105
- elif isinstance(return_result, BaseException):
106
- raise return_result
107
- else:
108
- return return_result
109
-
110
-
111
- def sync_wrapper(func, obj=None):
112
- """Given a function, make so can be called in async or blocking contexts
113
-
114
- Leave obj=None if defining within a class. Pass the instance if attaching
115
- as an attribute of the instance.
116
- """
117
-
118
- @functools.wraps(func)
119
- def wrapper(*args, **kwargs):
120
- self = obj or args[0]
121
- return sync(self.loop, func, *args, **kwargs)
122
-
123
- return wrapper
124
-
125
-
126
- @contextmanager
127
- def _selector_policy():
128
- original_policy = asyncio.get_event_loop_policy()
129
- try:
130
- if os.name == "nt" and hasattr(asyncio, "WindowsSelectorEventLoopPolicy"):
131
- asyncio.set_event_loop_policy(asyncio.WindowsSelectorEventLoopPolicy())
132
-
133
- yield
134
- finally:
135
- asyncio.set_event_loop_policy(original_policy)
136
-
137
-
138
- def get_loop():
139
- """Create or return the default fsspec IO loop
140
-
141
- The loop will be running on a separate thread.
142
- """
143
- if loop[0] is None:
144
- with get_lock():
145
- # repeat the check just in case the loop got filled between the
146
- # previous two calls from another thread
147
- if loop[0] is None:
148
- with _selector_policy():
149
- loop[0] = asyncio.new_event_loop()
150
- th = threading.Thread(target=loop[0].run_forever, name="fsspecIO")
151
- th.daemon = True
152
- th.start()
153
- iothread[0] = th
154
- return loop[0]
155
-
156
-
157
- if TYPE_CHECKING:
158
- import resource
159
-
160
- ResourceError = resource.error
161
- else:
162
- try:
163
- import resource
164
- except ImportError:
165
- resource = None
166
- ResourceError = OSError
167
- else:
168
- ResourceError = getattr(resource, "error", OSError)
169
-
170
- _DEFAULT_BATCH_SIZE = 128
171
- _NOFILES_DEFAULT_BATCH_SIZE = 1280
172
-
173
-
174
- def _get_batch_size(nofiles=False):
175
- from fsspec.config import conf
176
-
177
- if nofiles:
178
- if "nofiles_gather_batch_size" in conf:
179
- return conf["nofiles_gather_batch_size"]
180
- else:
181
- if "gather_batch_size" in conf:
182
- return conf["gather_batch_size"]
183
- if nofiles:
184
- return _NOFILES_DEFAULT_BATCH_SIZE
185
- if resource is None:
186
- return _DEFAULT_BATCH_SIZE
187
-
188
- try:
189
- soft_limit, _ = resource.getrlimit(resource.RLIMIT_NOFILE)
190
- except (ImportError, ValueError, ResourceError):
191
- return _DEFAULT_BATCH_SIZE
192
-
193
- if soft_limit == resource.RLIM_INFINITY:
194
- return -1
195
- else:
196
- return soft_limit // 8
197
-
198
-
199
- def running_async() -> bool:
200
- """Being executed by an event loop?"""
201
- try:
202
- asyncio.get_running_loop()
203
- return True
204
- except RuntimeError:
205
- return False
206
-
207
-
208
- async def _run_coros_in_chunks(
209
- coros,
210
- batch_size=None,
211
- callback=_DEFAULT_CALLBACK,
212
- timeout=None,
213
- return_exceptions=False,
214
- nofiles=False,
215
- ):
216
- """Run the given coroutines in chunks.
217
-
218
- Parameters
219
- ----------
220
- coros: list of coroutines to run
221
- batch_size: int or None
222
- Number of coroutines to submit/wait on simultaneously.
223
- If -1, then it will not be any throttling. If
224
- None, it will be inferred from _get_batch_size()
225
- callback: fsspec.callbacks.Callback instance
226
- Gets a relative_update when each coroutine completes
227
- timeout: number or None
228
- If given, each coroutine times out after this time. Note that, since
229
- there are multiple batches, the total run time of this function will in
230
- general be longer
231
- return_exceptions: bool
232
- Same meaning as in asyncio.gather
233
- nofiles: bool
234
- If inferring the batch_size, does this operation involve local files?
235
- If yes, you normally expect smaller batches.
236
- """
237
-
238
- if batch_size is None:
239
- batch_size = _get_batch_size(nofiles=nofiles)
240
-
241
- if batch_size == -1:
242
- batch_size = len(coros)
243
-
244
- assert batch_size > 0
245
- results = []
246
- for start in range(0, len(coros), batch_size):
247
- chunk = [
248
- asyncio.Task(asyncio.wait_for(c, timeout=timeout))
249
- for c in coros[start : start + batch_size]
250
- ]
251
- if callback is not _DEFAULT_CALLBACK:
252
- [
253
- t.add_done_callback(lambda *_, **__: callback.relative_update(1))
254
- for t in chunk
255
- ]
256
- results.extend(
257
- await asyncio.gather(*chunk, return_exceptions=return_exceptions),
258
- )
259
- return results
260
-
261
-
262
- # these methods should be implemented as async by any async-able backend
263
- async_methods = [
264
- "_ls",
265
- "_cat_file",
266
- "_get_file",
267
- "_put_file",
268
- "_rm_file",
269
- "_cp_file",
270
- "_pipe_file",
271
- "_expand_path",
272
- "_info",
273
- "_isfile",
274
- "_isdir",
275
- "_exists",
276
- "_walk",
277
- "_glob",
278
- "_find",
279
- "_du",
280
- "_size",
281
- "_mkdir",
282
- "_makedirs",
283
- ]
284
-
285
-
286
- class AsyncFileSystem(AbstractFileSystem):
287
- """Async file operations, default implementations
288
-
289
- Passes bulk operations to asyncio.gather for concurrent operation.
290
-
291
- Implementations that have concurrent batch operations and/or async methods
292
- should inherit from this class instead of AbstractFileSystem. Docstrings are
293
- copied from the un-underscored method in AbstractFileSystem, if not given.
294
- """
295
-
296
- # note that methods do not have docstring here; they will be copied
297
- # for _* methods and inferred for overridden methods.
298
-
299
- async_impl = True
300
- mirror_sync_methods = True
301
- disable_throttling = False
302
-
303
- def __init__(self, *args, asynchronous=False, loop=None, batch_size=None, **kwargs):
304
- self.asynchronous = asynchronous
305
- self._pid = os.getpid()
306
- if not asynchronous:
307
- self._loop = loop or get_loop()
308
- else:
309
- self._loop = None
310
- self.batch_size = batch_size
311
- super().__init__(*args, **kwargs)
312
-
313
- @property
314
- def loop(self):
315
- if self._pid != os.getpid():
316
- raise RuntimeError("This class is not fork-safe")
317
- return self._loop
318
-
319
- async def _rm_file(self, path, **kwargs):
320
- raise NotImplementedError
321
-
322
- async def _rm(self, path, recursive=False, batch_size=None, **kwargs):
323
- # TODO: implement on_error
324
- batch_size = batch_size or self.batch_size
325
- path = await self._expand_path(path, recursive=recursive)
326
- return await _run_coros_in_chunks(
327
- [self._rm_file(p, **kwargs) for p in reversed(path)],
328
- batch_size=batch_size,
329
- nofiles=True,
330
- )
331
-
332
- async def _cp_file(self, path1, path2, **kwargs):
333
- raise NotImplementedError
334
-
335
- async def _copy(
336
- self,
337
- path1,
338
- path2,
339
- recursive=False,
340
- on_error=None,
341
- maxdepth=None,
342
- batch_size=None,
343
- **kwargs,
344
- ):
345
- if on_error is None and recursive:
346
- on_error = "ignore"
347
- elif on_error is None:
348
- on_error = "raise"
349
-
350
- source_is_str = isinstance(path1, str)
351
- paths = await self._expand_path(path1, maxdepth=maxdepth, recursive=recursive)
352
- if source_is_str and (not recursive or maxdepth is not None):
353
- # Non-recursive glob does not copy directories
354
- paths = [p for p in paths if not (trailing_sep(p) or await self._isdir(p))]
355
- if not paths:
356
- return
357
-
358
- isdir = isinstance(path2, str) and (
359
- trailing_sep(path2) or await self._isdir(path2)
360
- )
361
- path2 = other_paths(
362
- paths,
363
- path2,
364
- exists=isdir and source_is_str and not trailing_sep_maybe_asterisk(path1),
365
- is_dir=isdir,
366
- flatten=not source_is_str,
367
- )
368
- batch_size = batch_size or self.batch_size
369
- coros = [self._cp_file(p1, p2, **kwargs) for p1, p2 in zip(paths, path2)]
370
- result = await _run_coros_in_chunks(
371
- coros, batch_size=batch_size, return_exceptions=True, nofiles=True
372
- )
373
-
374
- for ex in filter(is_exception, result):
375
- if on_error == "ignore" and isinstance(ex, FileNotFoundError):
376
- continue
377
- raise ex
378
-
379
- async def _pipe_file(self, path, value, **kwargs):
380
- raise NotImplementedError
381
-
382
- async def _pipe(self, path, value=None, batch_size=None, **kwargs):
383
- if isinstance(path, str):
384
- path = {path: value}
385
- batch_size = batch_size or self.batch_size
386
- return await _run_coros_in_chunks(
387
- [self._pipe_file(k, v, **kwargs) for k, v in path.items()],
388
- batch_size=batch_size,
389
- nofiles=True,
390
- )
391
-
392
- async def _process_limits(self, url, start, end):
393
- """Helper for "Range"-based _cat_file"""
394
- size = None
395
- suff = False
396
- if start is not None and start < 0:
397
- # if start is negative and end None, end is the "suffix length"
398
- if end is None:
399
- end = -start
400
- start = ""
401
- suff = True
402
- else:
403
- size = size or (await self._info(url))["size"]
404
- start = size + start
405
- elif start is None:
406
- start = 0
407
- if not suff:
408
- if end is not None and end < 0:
409
- if start is not None:
410
- size = size or (await self._info(url))["size"]
411
- end = size + end
412
- elif end is None:
413
- end = ""
414
- if isinstance(end, numbers.Integral):
415
- end -= 1 # bytes range is inclusive
416
- return "bytes=%s-%s" % (start, end)
417
-
418
- async def _cat_file(self, path, start=None, end=None, **kwargs):
419
- raise NotImplementedError
420
-
421
- async def _cat(
422
- self, path, recursive=False, on_error="raise", batch_size=None, **kwargs
423
- ):
424
- paths = await self._expand_path(path, recursive=recursive)
425
- coros = [self._cat_file(path, **kwargs) for path in paths]
426
- batch_size = batch_size or self.batch_size
427
- out = await _run_coros_in_chunks(
428
- coros, batch_size=batch_size, nofiles=True, return_exceptions=True
429
- )
430
- if on_error == "raise":
431
- ex = next(filter(is_exception, out), False)
432
- if ex:
433
- raise ex
434
- if (
435
- len(paths) > 1
436
- or isinstance(path, list)
437
- or paths[0] != self._strip_protocol(path)
438
- ):
439
- return {
440
- k: v
441
- for k, v in zip(paths, out)
442
- if on_error != "omit" or not is_exception(v)
443
- }
444
- else:
445
- return out[0]
446
-
447
- async def _cat_ranges(
448
- self,
449
- paths,
450
- starts,
451
- ends,
452
- max_gap=None,
453
- batch_size=None,
454
- on_error="return",
455
- **kwargs,
456
- ):
457
- # TODO: on_error
458
- if max_gap is not None:
459
- # use utils.merge_offset_ranges
460
- raise NotImplementedError
461
- if not isinstance(paths, list):
462
- raise TypeError
463
- if not isinstance(starts, Iterable):
464
- starts = [starts] * len(paths)
465
- if not isinstance(ends, Iterable):
466
- ends = [starts] * len(paths)
467
- if len(starts) != len(paths) or len(ends) != len(paths):
468
- raise ValueError
469
- coros = [
470
- self._cat_file(p, start=s, end=e, **kwargs)
471
- for p, s, e in zip(paths, starts, ends)
472
- ]
473
- batch_size = batch_size or self.batch_size
474
- return await _run_coros_in_chunks(
475
- coros, batch_size=batch_size, nofiles=True, return_exceptions=True
476
- )
477
-
478
- async def _put_file(self, lpath, rpath, **kwargs):
479
- raise NotImplementedError
480
-
481
- async def _put(
482
- self,
483
- lpath,
484
- rpath,
485
- recursive=False,
486
- callback=_DEFAULT_CALLBACK,
487
- batch_size=None,
488
- maxdepth=None,
489
- **kwargs,
490
- ):
491
- """Copy file(s) from local.
492
-
493
- Copies a specific file or tree of files (if recursive=True). If rpath
494
- ends with a "/", it will be assumed to be a directory, and target files
495
- will go within.
496
-
497
- The put_file method will be called concurrently on a batch of files. The
498
- batch_size option can configure the amount of futures that can be executed
499
- at the same time. If it is -1, then all the files will be uploaded concurrently.
500
- The default can be set for this instance by passing "batch_size" in the
501
- constructor, or for all instances by setting the "gather_batch_size" key
502
- in ``fsspec.config.conf``, falling back to 1/8th of the system limit .
503
- """
504
- source_is_str = isinstance(lpath, str)
505
- if source_is_str:
506
- lpath = make_path_posix(lpath)
507
- fs = LocalFileSystem()
508
- lpaths = fs.expand_path(lpath, recursive=recursive, maxdepth=maxdepth)
509
- if source_is_str and (not recursive or maxdepth is not None):
510
- # Non-recursive glob does not copy directories
511
- lpaths = [p for p in lpaths if not (trailing_sep(p) or fs.isdir(p))]
512
- if not lpaths:
513
- return
514
-
515
- isdir = isinstance(rpath, str) and (
516
- trailing_sep(rpath) or await self._isdir(rpath)
517
- )
518
- rpath = self._strip_protocol(rpath)
519
- rpaths = other_paths(
520
- lpaths,
521
- rpath,
522
- exists=isdir and source_is_str and not trailing_sep_maybe_asterisk(lpath),
523
- is_dir=isdir,
524
- flatten=not source_is_str,
525
- )
526
-
527
- is_dir = {l: os.path.isdir(l) for l in lpaths}
528
- rdirs = [r for l, r in zip(lpaths, rpaths) if is_dir[l]]
529
- file_pairs = [(l, r) for l, r in zip(lpaths, rpaths) if not is_dir[l]]
530
-
531
- await asyncio.gather(*[self._makedirs(d, exist_ok=True) for d in rdirs])
532
- batch_size = batch_size or self.batch_size
533
-
534
- coros = []
535
- callback.set_size(len(file_pairs))
536
- for lfile, rfile in file_pairs:
537
- callback.branch(lfile, rfile, kwargs)
538
- coros.append(self._put_file(lfile, rfile, **kwargs))
539
-
540
- return await _run_coros_in_chunks(
541
- coros, batch_size=batch_size, callback=callback
542
- )
543
-
544
- async def _get_file(self, rpath, lpath, **kwargs):
545
- raise NotImplementedError
546
-
547
- async def _get(
548
- self,
549
- rpath,
550
- lpath,
551
- recursive=False,
552
- callback=_DEFAULT_CALLBACK,
553
- maxdepth=None,
554
- **kwargs,
555
- ):
556
- """Copy file(s) to local.
557
-
558
- Copies a specific file or tree of files (if recursive=True). If lpath
559
- ends with a "/", it will be assumed to be a directory, and target files
560
- will go within. Can submit a list of paths, which may be glob-patterns
561
- and will be expanded.
562
-
563
- The get_file method will be called concurrently on a batch of files. The
564
- batch_size option can configure the amount of futures that can be executed
565
- at the same time. If it is -1, then all the files will be uploaded concurrently.
566
- The default can be set for this instance by passing "batch_size" in the
567
- constructor, or for all instances by setting the "gather_batch_size" key
568
- in ``fsspec.config.conf``, falling back to 1/8th of the system limit .
569
- """
570
- source_is_str = isinstance(rpath, str)
571
- # First check for rpath trailing slash as _strip_protocol removes it.
572
- source_not_trailing_sep = source_is_str and not trailing_sep_maybe_asterisk(
573
- rpath
574
- )
575
- rpath = self._strip_protocol(rpath)
576
- rpaths = await self._expand_path(rpath, recursive=recursive)
577
- if source_is_str and (not recursive or maxdepth is not None):
578
- # Non-recursive glob does not copy directories
579
- rpaths = [
580
- p for p in rpaths if not (trailing_sep(p) or await self._isdir(p))
581
- ]
582
- if not rpaths:
583
- return
584
-
585
- lpath = make_path_posix(lpath)
586
- isdir = isinstance(lpath, str) and (
587
- trailing_sep(lpath) or LocalFileSystem().isdir(lpath)
588
- )
589
- lpaths = other_paths(
590
- rpaths,
591
- lpath,
592
- exists=isdir and source_not_trailing_sep,
593
- is_dir=isdir,
594
- flatten=not source_is_str,
595
- )
596
- [os.makedirs(os.path.dirname(lp), exist_ok=True) for lp in lpaths]
597
- batch_size = kwargs.pop("batch_size", self.batch_size)
598
-
599
- coros = []
600
- callback.set_size(len(lpaths))
601
- for lpath, rpath in zip(lpaths, rpaths):
602
- callback.branch(rpath, lpath, kwargs)
603
- coros.append(self._get_file(rpath, lpath, **kwargs))
604
- return await _run_coros_in_chunks(
605
- coros, batch_size=batch_size, callback=callback
606
- )
607
-
608
- async def _isfile(self, path):
609
- try:
610
- return (await self._info(path))["type"] == "file"
611
- except: # noqa: E722
612
- return False
613
-
614
- async def _isdir(self, path):
615
- try:
616
- return (await self._info(path))["type"] == "directory"
617
- except OSError:
618
- return False
619
-
620
- async def _size(self, path):
621
- return (await self._info(path)).get("size", None)
622
-
623
- async def _sizes(self, paths, batch_size=None):
624
- batch_size = batch_size or self.batch_size
625
- return await _run_coros_in_chunks(
626
- [self._size(p) for p in paths], batch_size=batch_size
627
- )
628
-
629
- async def _exists(self, path):
630
- try:
631
- await self._info(path)
632
- return True
633
- except FileNotFoundError:
634
- return False
635
-
636
- async def _info(self, path, **kwargs):
637
- raise NotImplementedError
638
-
639
- async def _ls(self, path, detail=True, **kwargs):
640
- raise NotImplementedError
641
-
642
- async def _walk(self, path, maxdepth=None, **kwargs):
643
- if maxdepth is not None and maxdepth < 1:
644
- raise ValueError("maxdepth must be at least 1")
645
-
646
- path = self._strip_protocol(path)
647
- full_dirs = {}
648
- dirs = {}
649
- files = {}
650
-
651
- detail = kwargs.pop("detail", False)
652
- try:
653
- listing = await self._ls(path, detail=True, **kwargs)
654
- except (FileNotFoundError, OSError):
655
- if detail:
656
- yield path, {}, {}
657
- else:
658
- yield path, [], []
659
- return
660
-
661
- for info in listing:
662
- # each info name must be at least [path]/part , but here
663
- # we check also for names like [path]/part/
664
- pathname = info["name"].rstrip("/")
665
- name = pathname.rsplit("/", 1)[-1]
666
- if info["type"] == "directory" and pathname != path:
667
- # do not include "self" path
668
- full_dirs[name] = pathname
669
- dirs[name] = info
670
- elif pathname == path:
671
- # file-like with same name as give path
672
- files[""] = info
673
- else:
674
- files[name] = info
675
-
676
- if detail:
677
- yield path, dirs, files
678
- else:
679
- yield path, list(dirs), list(files)
680
-
681
- if maxdepth is not None:
682
- maxdepth -= 1
683
- if maxdepth < 1:
684
- return
685
-
686
- for d in dirs:
687
- async for _ in self._walk(
688
- full_dirs[d], maxdepth=maxdepth, detail=detail, **kwargs
689
- ):
690
- yield _
691
-
692
- async def _glob(self, path, **kwargs):
693
- import re
694
-
695
- ends = path.endswith("/")
696
- path = self._strip_protocol(path)
697
- indstar = path.find("*") if path.find("*") >= 0 else len(path)
698
- indques = path.find("?") if path.find("?") >= 0 else len(path)
699
- indbrace = path.find("[") if path.find("[") >= 0 else len(path)
700
-
701
- ind = min(indstar, indques, indbrace)
702
-
703
- detail = kwargs.pop("detail", False)
704
-
705
- if not has_magic(path):
706
- root = path
707
- depth = 1
708
- if ends:
709
- path += "/*"
710
- elif await self._exists(path):
711
- if not detail:
712
- return [path]
713
- else:
714
- return {path: await self._info(path)}
715
- else:
716
- if not detail:
717
- return [] # glob of non-existent returns empty
718
- else:
719
- return {}
720
- elif "/" in path[:ind]:
721
- ind2 = path[:ind].rindex("/")
722
- root = path[: ind2 + 1]
723
- depth = None if "**" in path else path[ind2 + 1 :].count("/") + 1
724
- else:
725
- root = ""
726
- depth = None if "**" in path else path[ind + 1 :].count("/") + 1
727
-
728
- allpaths = await self._find(
729
- root, maxdepth=depth, withdirs=True, detail=True, **kwargs
730
- )
731
- # Escape characters special to python regex, leaving our supported
732
- # special characters in place.
733
- # See https://www.gnu.org/software/bash/manual/html_node/Pattern-Matching.html
734
- # for shell globbing details.
735
- pattern = (
736
- "^"
737
- + (
738
- path.replace("\\", r"\\")
739
- .replace(".", r"\.")
740
- .replace("+", r"\+")
741
- .replace("//", "/")
742
- .replace("(", r"\(")
743
- .replace(")", r"\)")
744
- .replace("|", r"\|")
745
- .replace("^", r"\^")
746
- .replace("$", r"\$")
747
- .replace("{", r"\{")
748
- .replace("}", r"\}")
749
- .rstrip("/")
750
- .replace("?", ".")
751
- )
752
- + "$"
753
- )
754
- pattern = re.sub("[*]{2}", "=PLACEHOLDER=", pattern)
755
- pattern = re.sub("[*]", "[^/]*", pattern)
756
- pattern = re.compile(pattern.replace("=PLACEHOLDER=", ".*"))
757
- out = {
758
- p: allpaths[p]
759
- for p in sorted(allpaths)
760
- if pattern.match(p.replace("//", "/").rstrip("/"))
761
- }
762
- if detail:
763
- return out
764
- else:
765
- return list(out)
766
-
767
- async def _du(self, path, total=True, maxdepth=None, **kwargs):
768
- sizes = {}
769
- # async for?
770
- for f in await self._find(path, maxdepth=maxdepth, **kwargs):
771
- info = await self._info(f)
772
- sizes[info["name"]] = info["size"]
773
- if total:
774
- return sum(sizes.values())
775
- else:
776
- return sizes
777
-
778
- async def _find(self, path, maxdepth=None, withdirs=False, **kwargs):
779
- path = self._strip_protocol(path)
780
- out = dict()
781
- detail = kwargs.pop("detail", False)
782
- # async for?
783
- async for _, dirs, files in self._walk(path, maxdepth, detail=True, **kwargs):
784
- if withdirs:
785
- files.update(dirs)
786
- out.update({info["name"]: info for name, info in files.items()})
787
- if not out and (await self._isfile(path)):
788
- # walk works on directories, but find should also return [path]
789
- # when path happens to be a file
790
- out[path] = {}
791
- names = sorted(out)
792
- if not detail:
793
- return names
794
- else:
795
- return {name: out[name] for name in names}
796
-
797
- async def _expand_path(self, path, recursive=False, maxdepth=None):
798
- if maxdepth is not None and maxdepth < 1:
799
- raise ValueError("maxdepth must be at least 1")
800
-
801
- if isinstance(path, str):
802
- out = await self._expand_path([path], recursive, maxdepth)
803
- else:
804
- out = set()
805
- path = [self._strip_protocol(p) for p in path]
806
- for p in path: # can gather here
807
- if has_magic(p):
808
- bit = set(await self._glob(p))
809
- out |= bit
810
- if recursive:
811
- # glob call above expanded one depth so if maxdepth is defined
812
- # then decrement it in expand_path call below. If it is zero
813
- # after decrementing then avoid expand_path call.
814
- if maxdepth is not None and maxdepth <= 1:
815
- continue
816
- out |= set(
817
- await self._expand_path(
818
- list(bit),
819
- recursive=recursive,
820
- maxdepth=maxdepth - 1 if maxdepth is not None else None,
821
- )
822
- )
823
- continue
824
- elif recursive:
825
- rec = set(await self._find(p, maxdepth=maxdepth, withdirs=True))
826
- out |= rec
827
- if p not in out and (recursive is False or (await self._exists(p))):
828
- # should only check once, for the root
829
- out.add(p)
830
- if not out:
831
- raise FileNotFoundError(path)
832
- return list(sorted(out))
833
-
834
- async def _mkdir(self, path, create_parents=True, **kwargs):
835
- pass # not necessary to implement, may not have directories
836
-
837
- async def _makedirs(self, path, exist_ok=False):
838
- pass # not necessary to implement, may not have directories
839
-
840
- async def open_async(self, path, mode="rb", **kwargs):
841
- if "b" not in mode or kwargs.get("compression"):
842
- raise ValueError
843
- raise NotImplementedError
844
-
845
-
846
- def mirror_sync_methods(obj):
847
- """Populate sync and async methods for obj
848
-
849
- For each method will create a sync version if the name refers to an async method
850
- (coroutine) and there is no override in the child class; will create an async
851
- method for the corresponding sync method if there is no implementation.
852
-
853
- Uses the methods specified in
854
- - async_methods: the set that an implementation is expected to provide
855
- - default_async_methods: that can be derived from their sync version in
856
- AbstractFileSystem
857
- - AsyncFileSystem: async-specific default coroutines
858
- """
859
- from fsspec import AbstractFileSystem
860
-
861
- for method in async_methods + dir(AsyncFileSystem):
862
- if not method.startswith("_"):
863
- continue
864
- smethod = method[1:]
865
- if private.match(method):
866
- isco = inspect.iscoroutinefunction(getattr(obj, method, None))
867
- unsync = getattr(getattr(obj, smethod, False), "__func__", None)
868
- is_default = unsync is getattr(AbstractFileSystem, smethod, "")
869
- if isco and is_default:
870
- mth = sync_wrapper(getattr(obj, method), obj=obj)
871
- setattr(obj, smethod, mth)
872
- if not mth.__doc__:
873
- mth.__doc__ = getattr(
874
- getattr(AbstractFileSystem, smethod, None), "__doc__", ""
875
- )
876
-
877
-
878
- class FSSpecCoroutineCancel(Exception):
879
- pass
880
-
881
-
882
- def _dump_running_tasks(
883
- printout=True, cancel=True, exc=FSSpecCoroutineCancel, with_task=False
884
- ):
885
- import traceback
886
-
887
- tasks = [t for t in asyncio.tasks.all_tasks(loop[0]) if not t.done()]
888
- if printout:
889
- [task.print_stack() for task in tasks]
890
- out = [
891
- {
892
- "locals": task._coro.cr_frame.f_locals,
893
- "file": task._coro.cr_frame.f_code.co_filename,
894
- "firstline": task._coro.cr_frame.f_code.co_firstlineno,
895
- "linelo": task._coro.cr_frame.f_lineno,
896
- "stack": traceback.format_stack(task._coro.cr_frame),
897
- "task": task if with_task else None,
898
- }
899
- for task in tasks
900
- ]
901
- if cancel:
902
- for t in tasks:
903
- cbs = t._callbacks
904
- t.cancel()
905
- asyncio.futures.Future.set_exception(t, exc)
906
- asyncio.futures.Future.cancel(t)
907
- [cb[0](t) for cb in cbs] # cancels any dependent concurrent.futures
908
- try:
909
- t._coro.throw(exc) # exits coro, unless explicitly handled
910
- except exc:
911
- pass
912
- return out
913
-
914
-
915
- class AbstractAsyncStreamedFile(AbstractBufferedFile):
916
- # no read buffering, and always auto-commit
917
- # TODO: readahead might still be useful here, but needs async version
918
-
919
- async def read(self, length=-1):
920
- """
921
- Return data from cache, or fetch pieces as necessary
922
-
923
- Parameters
924
- ----------
925
- length: int (-1)
926
- Number of bytes to read; if <0, all remaining bytes.
927
- """
928
- length = -1 if length is None else int(length)
929
- if self.mode != "rb":
930
- raise ValueError("File not in read mode")
931
- if length < 0:
932
- length = self.size - self.loc
933
- if self.closed:
934
- raise ValueError("I/O operation on closed file.")
935
- if length == 0:
936
- # don't even bother calling fetch
937
- return b""
938
- out = await self._fetch_range(self.loc, self.loc + length)
939
- self.loc += len(out)
940
- return out
941
-
942
- async def write(self, data):
943
- """
944
- Write data to buffer.
945
-
946
- Buffer only sent on flush() or if buffer is greater than
947
- or equal to blocksize.
948
-
949
- Parameters
950
- ----------
951
- data: bytes
952
- Set of bytes to be written.
953
- """
954
- if self.mode not in {"wb", "ab"}:
955
- raise ValueError("File not in write mode")
956
- if self.closed:
957
- raise ValueError("I/O operation on closed file.")
958
- if self.forced:
959
- raise ValueError("This file has been force-flushed, can only close")
960
- out = self.buffer.write(data)
961
- self.loc += out
962
- if self.buffer.tell() >= self.blocksize:
963
- await self.flush()
964
- return out
965
-
966
- async def close(self):
967
- """Close file
968
-
969
- Finalizes writes, discards cache
970
- """
971
- if getattr(self, "_unclosable", False):
972
- return
973
- if self.closed:
974
- return
975
- if self.mode == "rb":
976
- self.cache = None
977
- else:
978
- if not self.forced:
979
- await self.flush(force=True)
980
-
981
- if self.fs is not None:
982
- self.fs.invalidate_cache(self.path)
983
- self.fs.invalidate_cache(self.fs._parent(self.path))
984
-
985
- self.closed = True
986
-
987
- async def flush(self, force=False):
988
- if self.closed:
989
- raise ValueError("Flush on closed file")
990
- if force and self.forced:
991
- raise ValueError("Force flush cannot be called more than once")
992
- if force:
993
- self.forced = True
994
-
995
- if self.mode not in {"wb", "ab"}:
996
- # no-op to flush on read-mode
997
- return
998
-
999
- if not force and self.buffer.tell() < self.blocksize:
1000
- # Defer write on small block
1001
- return
1002
-
1003
- if self.offset is None:
1004
- # Initialize a multipart upload
1005
- self.offset = 0
1006
- try:
1007
- await self._initiate_upload()
1008
- except: # noqa: E722
1009
- self.closed = True
1010
- raise
1011
-
1012
- if await self._upload_chunk(final=force) is not False:
1013
- self.offset += self.buffer.seek(0, 2)
1014
- self.buffer = io.BytesIO()
1015
-
1016
- async def __aenter__(self):
1017
- return self
1018
-
1019
- async def __aexit__(self, exc_type, exc_val, exc_tb):
1020
- await self.close()
1021
-
1022
- async def _fetch_range(self, start, end):
1023
- raise NotImplementedError
1024
-
1025
- async def _initiate_upload(self):
1026
- pass
1027
-
1028
- async def _upload_chunk(self, final=False):
1029
- raise NotImplementedError
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/components/scatter_plot.py DELETED
@@ -1,472 +0,0 @@
1
- """gr.ScatterPlot() component."""
2
-
3
- from __future__ import annotations
4
-
5
- from typing import Callable, Literal
6
-
7
- import altair as alt
8
- import pandas as pd
9
- from gradio_client.documentation import document, set_documentation_group
10
- from pandas.api.types import is_numeric_dtype
11
-
12
- from gradio.components.base import _Keywords
13
- from gradio.components.plot import AltairPlot, Plot
14
-
15
- set_documentation_group("component")
16
-
17
-
18
- @document()
19
- class ScatterPlot(Plot):
20
- """
21
- Create a scatter plot.
22
-
23
- Preprocessing: this component does *not* accept input.
24
- Postprocessing: expects a pandas dataframe with the data to plot.
25
-
26
- Demos: scatter_plot
27
- Guides: creating-a-dashboard-from-bigquery-data
28
- """
29
-
30
- def __init__(
31
- self,
32
- value: pd.DataFrame | Callable | None = None,
33
- x: str | None = None,
34
- y: str | None = None,
35
- *,
36
- color: str | None = None,
37
- size: str | None = None,
38
- shape: str | None = None,
39
- title: str | None = None,
40
- tooltip: list[str] | str | None = None,
41
- x_title: str | None = None,
42
- y_title: str | None = None,
43
- color_legend_title: str | None = None,
44
- size_legend_title: str | None = None,
45
- shape_legend_title: str | None = None,
46
- color_legend_position: Literal[
47
- "left",
48
- "right",
49
- "top",
50
- "bottom",
51
- "top-left",
52
- "top-right",
53
- "bottom-left",
54
- "bottom-right",
55
- "none",
56
- ]
57
- | None = None,
58
- size_legend_position: Literal[
59
- "left",
60
- "right",
61
- "top",
62
- "bottom",
63
- "top-left",
64
- "top-right",
65
- "bottom-left",
66
- "bottom-right",
67
- "none",
68
- ]
69
- | None = None,
70
- shape_legend_position: Literal[
71
- "left",
72
- "right",
73
- "top",
74
- "bottom",
75
- "top-left",
76
- "top-right",
77
- "bottom-left",
78
- "bottom-right",
79
- "none",
80
- ]
81
- | None = None,
82
- height: int | None = None,
83
- width: int | None = None,
84
- x_lim: list[int | float] | None = None,
85
- y_lim: list[int | float] | None = None,
86
- caption: str | None = None,
87
- interactive: bool | None = True,
88
- label: str | None = None,
89
- every: float | None = None,
90
- show_label: bool | None = None,
91
- container: bool = True,
92
- scale: int | None = None,
93
- min_width: int = 160,
94
- visible: bool = True,
95
- elem_id: str | None = None,
96
- elem_classes: list[str] | str | None = None,
97
- ):
98
- """
99
- Parameters:
100
- value: The pandas dataframe containing the data to display in a scatter plot, or a callable. If callable, the function will be called whenever the app loads to set the initial value of the component.
101
- x: Column corresponding to the x axis.
102
- y: Column corresponding to the y axis.
103
- color: The column to determine the point color. If the column contains numeric data, gradio will interpolate the column data so that small values correspond to light colors and large values correspond to dark values.
104
- size: The column used to determine the point size. Should contain numeric data so that gradio can map the data to the point size.
105
- shape: The column used to determine the point shape. Should contain categorical data. Gradio will map each unique value to a different shape.
106
- title: The title to display on top of the chart.
107
- tooltip: The column (or list of columns) to display on the tooltip when a user hovers a point on the plot.
108
- x_title: The title given to the x axis. By default, uses the value of the x parameter.
109
- y_title: The title given to the y axis. By default, uses the value of the y parameter.
110
- color_legend_title: The title given to the color legend. By default, uses the value of color parameter.
111
- size_legend_title: The title given to the size legend. By default, uses the value of the size parameter.
112
- shape_legend_title: The title given to the shape legend. By default, uses the value of the shape parameter.
113
- color_legend_position: The position of the color legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
114
- size_legend_position: The position of the size legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
115
- shape_legend_position: The position of the shape legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
116
- height: The height of the plot in pixels.
117
- width: The width of the plot in pixels.
118
- x_lim: A tuple or list containing the limits for the x-axis, specified as [x_min, x_max].
119
- y_lim: A tuple of list containing the limits for the y-axis, specified as [y_min, y_max].
120
- caption: The (optional) caption to display below the plot.
121
- interactive: Whether users should be able to interact with the plot by panning or zooming with their mouse or trackpad.
122
- label: The (optional) label to display on the top left corner of the plot.
123
- every: If `value` is a callable, run the function 'every' number of seconds while the client connection is open. Has no effect otherwise. Queue must be enabled. The event can be accessed (e.g. to cancel it) via this component's .load_event attribute.
124
- show_label: Whether the label should be displayed.
125
- visible: Whether the plot should be visible.
126
- elem_id: An optional string that is assigned as the id of this component in the HTML DOM. Can be used for targeting CSS styles.
127
- elem_classes: An optional list of strings that are assigned as the classes of this component in the HTML DOM. Can be used for targeting CSS styles.
128
- """
129
- self.x = x
130
- self.y = y
131
- self.color = color
132
- self.size = size
133
- self.shape = shape
134
- self.tooltip = tooltip
135
- self.title = title
136
- self.x_title = x_title
137
- self.y_title = y_title
138
- self.color_legend_title = color_legend_title
139
- self.color_legend_position = color_legend_position
140
- self.size_legend_title = size_legend_title
141
- self.size_legend_position = size_legend_position
142
- self.shape_legend_title = shape_legend_title
143
- self.shape_legend_position = shape_legend_position
144
- self.caption = caption
145
- self.interactive_chart = interactive
146
- self.width = width
147
- self.height = height
148
- self.x_lim = x_lim
149
- self.y_lim = y_lim
150
- super().__init__(
151
- value=value,
152
- label=label,
153
- every=every,
154
- show_label=show_label,
155
- container=container,
156
- scale=scale,
157
- min_width=min_width,
158
- visible=visible,
159
- elem_id=elem_id,
160
- elem_classes=elem_classes,
161
- )
162
-
163
- def get_config(self):
164
- config = super().get_config()
165
- config["caption"] = self.caption
166
- return config
167
-
168
- def get_block_name(self) -> str:
169
- return "plot"
170
-
171
- @staticmethod
172
- def update(
173
- value: pd.DataFrame | dict | Literal[_Keywords.NO_VALUE] = _Keywords.NO_VALUE,
174
- x: str | None = None,
175
- y: str | None = None,
176
- color: str | None = None,
177
- size: str | None = None,
178
- shape: str | None = None,
179
- title: str | None = None,
180
- tooltip: list[str] | str | None = None,
181
- x_title: str | None = None,
182
- y_title: str | None = None,
183
- color_legend_title: str | None = None,
184
- size_legend_title: str | None = None,
185
- shape_legend_title: str | None = None,
186
- color_legend_position: Literal[
187
- "left",
188
- "right",
189
- "top",
190
- "bottom",
191
- "top-left",
192
- "top-right",
193
- "bottom-left",
194
- "bottom-right",
195
- "none",
196
- ]
197
- | None = None,
198
- size_legend_position: Literal[
199
- "left",
200
- "right",
201
- "top",
202
- "bottom",
203
- "top-left",
204
- "top-right",
205
- "bottom-left",
206
- "bottom-right",
207
- "none",
208
- ]
209
- | None = None,
210
- shape_legend_position: Literal[
211
- "left",
212
- "right",
213
- "top",
214
- "bottom",
215
- "top-left",
216
- "top-right",
217
- "bottom-left",
218
- "bottom-right",
219
- "none",
220
- ]
221
- | None = None,
222
- height: int | None = None,
223
- width: int | None = None,
224
- x_lim: list[int | float] | None = None,
225
- y_lim: list[int | float] | None = None,
226
- interactive: bool | None = None,
227
- caption: str | None = None,
228
- label: str | None = None,
229
- show_label: bool | None = None,
230
- container: bool | None = None,
231
- scale: int | None = None,
232
- min_width: int | None = None,
233
- visible: bool | None = None,
234
- ):
235
- """Update an existing plot component.
236
-
237
- If updating any of the plot properties (color, size, etc) the value, x, and y parameters must be specified.
238
-
239
- Parameters:
240
- value: The pandas dataframe containing the data to display in a scatter plot.
241
- x: Column corresponding to the x axis.
242
- y: Column corresponding to the y axis.
243
- color: The column to determine the point color. If the column contains numeric data, gradio will interpolate the column data so that small values correspond to light colors and large values correspond to dark values.
244
- size: The column used to determine the point size. Should contain numeric data so that gradio can map the data to the point size.
245
- shape: The column used to determine the point shape. Should contain categorical data. Gradio will map each unique value to a different shape.
246
- title: The title to display on top of the chart.
247
- tooltip: The column (or list of columns) to display on the tooltip when a user hovers a point on the plot.
248
- x_title: The title given to the x axis. By default, uses the value of the x parameter.
249
- y_title: The title given to the y axis. By default, uses the value of the y parameter.
250
- color_legend_title: The title given to the color legend. By default, uses the value of color parameter.
251
- size_legend_title: The title given to the size legend. By default, uses the value of the size parameter.
252
- shape_legend_title: The title given to the shape legend. By default, uses the value of the shape parameter.
253
- color_legend_position: The position of the color legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
254
- size_legend_position: The position of the size legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
255
- shape_legend_position: The position of the shape legend. If the string value 'none' is passed, this legend is omitted. For other valid position values see: https://vega.github.io/vega/docs/legends/#orientation.
256
- height: The height of the plot in pixels.
257
- width: The width of the plot in pixels.
258
- x_lim: A tuple or list containing the limits for the x-axis, specified as [x_min, x_max].
259
- y_lim: A tuple of list containing the limits for the y-axis, specified as [y_min, y_max].
260
- interactive: Whether users should be able to interact with the plot by panning or zooming with their mouse or trackpad.
261
- caption: The (optional) caption to display below the plot.
262
- label: The (optional) label to display in the top left corner of the plot.
263
- show_label: Whether the label should be displayed.
264
- visible: Whether the plot should be visible.
265
- """
266
- properties = [
267
- x,
268
- y,
269
- color,
270
- size,
271
- shape,
272
- title,
273
- tooltip,
274
- x_title,
275
- y_title,
276
- color_legend_title,
277
- size_legend_title,
278
- shape_legend_title,
279
- color_legend_position,
280
- size_legend_position,
281
- shape_legend_position,
282
- height,
283
- width,
284
- x_lim,
285
- y_lim,
286
- interactive,
287
- ]
288
- if any(properties):
289
- if not isinstance(value, pd.DataFrame):
290
- raise ValueError(
291
- "In order to update plot properties the value parameter "
292
- "must be provided, and it must be a Dataframe. Please pass a value "
293
- "parameter to gr.ScatterPlot.update."
294
- )
295
- if x is None or y is None:
296
- raise ValueError(
297
- "In order to update plot properties, the x and y axis data "
298
- "must be specified. Please pass valid values for x an y to "
299
- "gr.ScatterPlot.update."
300
- )
301
- chart = ScatterPlot.create_plot(value, *properties)
302
- value = {"type": "altair", "plot": chart.to_json(), "chart": "scatter"}
303
-
304
- updated_config = {
305
- "label": label,
306
- "show_label": show_label,
307
- "container": container,
308
- "scale": scale,
309
- "min_width": min_width,
310
- "visible": visible,
311
- "value": value,
312
- "caption": caption,
313
- "__type__": "update",
314
- }
315
- return updated_config
316
-
317
- @staticmethod
318
- def create_plot(
319
- value: pd.DataFrame,
320
- x: str,
321
- y: str,
322
- color: str | None = None,
323
- size: str | None = None,
324
- shape: str | None = None,
325
- title: str | None = None,
326
- tooltip: list[str] | str | None = None,
327
- x_title: str | None = None,
328
- y_title: str | None = None,
329
- color_legend_title: str | None = None,
330
- size_legend_title: str | None = None,
331
- shape_legend_title: str | None = None,
332
- color_legend_position: Literal[
333
- "left",
334
- "right",
335
- "top",
336
- "bottom",
337
- "top-left",
338
- "top-right",
339
- "bottom-left",
340
- "bottom-right",
341
- "none",
342
- ]
343
- | None = None,
344
- size_legend_position: Literal[
345
- "left",
346
- "right",
347
- "top",
348
- "bottom",
349
- "top-left",
350
- "top-right",
351
- "bottom-left",
352
- "bottom-right",
353
- "none",
354
- ]
355
- | None = None,
356
- shape_legend_position: Literal[
357
- "left",
358
- "right",
359
- "top",
360
- "bottom",
361
- "top-left",
362
- "top-right",
363
- "bottom-left",
364
- "bottom-right",
365
- "none",
366
- ]
367
- | None = None,
368
- height: int | None = None,
369
- width: int | None = None,
370
- x_lim: list[int | float] | None = None,
371
- y_lim: list[int | float] | None = None,
372
- interactive: bool | None = True,
373
- ):
374
- """Helper for creating the scatter plot."""
375
- interactive = True if interactive is None else interactive
376
- encodings = {
377
- "x": alt.X(
378
- x, # type: ignore
379
- title=x_title or x, # type: ignore
380
- scale=AltairPlot.create_scale(x_lim), # type: ignore
381
- ), # ignore: type
382
- "y": alt.Y(
383
- y, # type: ignore
384
- title=y_title or y, # type: ignore
385
- scale=AltairPlot.create_scale(y_lim), # type: ignore
386
- ),
387
- }
388
- properties = {}
389
- if title:
390
- properties["title"] = title
391
- if height:
392
- properties["height"] = height
393
- if width:
394
- properties["width"] = width
395
- if color:
396
- if is_numeric_dtype(value[color]):
397
- domain = [value[color].min(), value[color].max()]
398
- range_ = [0, 1]
399
- type_ = "quantitative"
400
- else:
401
- domain = value[color].unique().tolist()
402
- range_ = list(range(len(domain)))
403
- type_ = "nominal"
404
-
405
- encodings["color"] = {
406
- "field": color,
407
- "type": type_,
408
- "legend": AltairPlot.create_legend(
409
- position=color_legend_position, title=color_legend_title or color
410
- ),
411
- "scale": {"domain": domain, "range": range_},
412
- }
413
- if tooltip:
414
- encodings["tooltip"] = tooltip
415
- if size:
416
- encodings["size"] = {
417
- "field": size,
418
- "type": "quantitative" if is_numeric_dtype(value[size]) else "nominal",
419
- "legend": AltairPlot.create_legend(
420
- position=size_legend_position, title=size_legend_title or size
421
- ),
422
- }
423
- if shape:
424
- encodings["shape"] = {
425
- "field": shape,
426
- "type": "quantitative" if is_numeric_dtype(value[shape]) else "nominal",
427
- "legend": AltairPlot.create_legend(
428
- position=shape_legend_position, title=shape_legend_title or shape
429
- ),
430
- }
431
- chart = (
432
- alt.Chart(value) # type: ignore
433
- .mark_point(clip=True) # type: ignore
434
- .encode(**encodings)
435
- .properties(background="transparent", **properties)
436
- )
437
- if interactive:
438
- chart = chart.interactive()
439
-
440
- return chart
441
-
442
- def postprocess(self, y: pd.DataFrame | dict | None) -> dict[str, str] | None:
443
- # if None or update
444
- if y is None or isinstance(y, dict):
445
- return y
446
- if self.x is None or self.y is None:
447
- raise ValueError("No value provided for required parameters `x` and `y`.")
448
- chart = self.create_plot(
449
- value=y,
450
- x=self.x,
451
- y=self.y,
452
- color=self.color,
453
- size=self.size,
454
- shape=self.shape,
455
- title=self.title,
456
- tooltip=self.tooltip,
457
- x_title=self.x_title,
458
- y_title=self.y_title,
459
- color_legend_title=self.color_legend_title,
460
- size_legend_title=self.size_legend_title,
461
- shape_legend_title=self.size_legend_title,
462
- color_legend_position=self.color_legend_position,
463
- size_legend_position=self.size_legend_position,
464
- shape_legend_position=self.shape_legend_position,
465
- interactive=self.interactive_chart,
466
- height=self.height,
467
- width=self.width,
468
- x_lim=self.x_lim,
469
- y_lim=self.y_lim,
470
- )
471
-
472
- return {"type": "altair", "plot": chart.to_json(), "chart": "scatter"}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/external_utils.py DELETED
@@ -1,140 +0,0 @@
1
- """Utility function for gradio/external.py"""
2
-
3
- import base64
4
- import math
5
- import operator
6
- import re
7
- import warnings
8
- from typing import Dict, List, Tuple
9
-
10
- import requests
11
- import yaml
12
-
13
- from gradio import components
14
-
15
- ##################
16
- # Helper functions for processing tabular data
17
- ##################
18
-
19
-
20
- def get_tabular_examples(model_name: str) -> Dict[str, List[float]]:
21
- readme = requests.get(f"https://huggingface.co/{model_name}/resolve/main/README.md")
22
- if readme.status_code != 200:
23
- warnings.warn(f"Cannot load examples from README for {model_name}", UserWarning)
24
- example_data = {}
25
- else:
26
- yaml_regex = re.search(
27
- "(?:^|[\r\n])---[\n\r]+([\\S\\s]*?)[\n\r]+---([\n\r]|$)", readme.text
28
- )
29
- if yaml_regex is None:
30
- example_data = {}
31
- else:
32
- example_yaml = next(
33
- yaml.safe_load_all(readme.text[: yaml_regex.span()[-1]])
34
- )
35
- example_data = example_yaml.get("widget", {}).get("structuredData", {})
36
- if not example_data:
37
- raise ValueError(
38
- f"No example data found in README.md of {model_name} - Cannot build gradio demo. "
39
- "See the README.md here: https://huggingface.co/scikit-learn/tabular-playground/blob/main/README.md "
40
- "for a reference on how to provide example data to your model."
41
- )
42
- # replace nan with string NaN for inference API
43
- for data in example_data.values():
44
- for i, val in enumerate(data):
45
- if isinstance(val, float) and math.isnan(val):
46
- data[i] = "NaN"
47
- return example_data
48
-
49
-
50
- def cols_to_rows(
51
- example_data: Dict[str, List[float]]
52
- ) -> Tuple[List[str], List[List[float]]]:
53
- headers = list(example_data.keys())
54
- n_rows = max(len(example_data[header] or []) for header in headers)
55
- data = []
56
- for row_index in range(n_rows):
57
- row_data = []
58
- for header in headers:
59
- col = example_data[header] or []
60
- if row_index >= len(col):
61
- row_data.append("NaN")
62
- else:
63
- row_data.append(col[row_index])
64
- data.append(row_data)
65
- return headers, data
66
-
67
-
68
- def rows_to_cols(incoming_data: Dict) -> Dict[str, Dict[str, Dict[str, List[str]]]]:
69
- data_column_wise = {}
70
- for i, header in enumerate(incoming_data["headers"]):
71
- data_column_wise[header] = [str(row[i]) for row in incoming_data["data"]]
72
- return {"inputs": {"data": data_column_wise}}
73
-
74
-
75
- ##################
76
- # Helper functions for processing other kinds of data
77
- ##################
78
-
79
-
80
- def postprocess_label(scores: Dict) -> Dict:
81
- sorted_pred = sorted(scores.items(), key=operator.itemgetter(1), reverse=True)
82
- return {
83
- "label": sorted_pred[0][0],
84
- "confidences": [
85
- {"label": pred[0], "confidence": pred[1]} for pred in sorted_pred
86
- ],
87
- }
88
-
89
-
90
- def encode_to_base64(r: requests.Response) -> str:
91
- # Handles the different ways HF API returns the prediction
92
- base64_repr = base64.b64encode(r.content).decode("utf-8")
93
- data_prefix = ";base64,"
94
- # Case 1: base64 representation already includes data prefix
95
- if data_prefix in base64_repr:
96
- return base64_repr
97
- else:
98
- content_type = r.headers.get("content-type")
99
- # Case 2: the data prefix is a key in the response
100
- if content_type == "application/json":
101
- try:
102
- data = r.json()[0]
103
- content_type = data["content-type"]
104
- base64_repr = data["blob"]
105
- except KeyError as ke:
106
- raise ValueError(
107
- "Cannot determine content type returned by external API."
108
- ) from ke
109
- # Case 3: the data prefix is included in the response headers
110
- else:
111
- pass
112
- new_base64 = f"data:{content_type};base64,{base64_repr}"
113
- return new_base64
114
-
115
-
116
- ##################
117
- # Helper function for cleaning up an Interface loaded from HF Spaces
118
- ##################
119
-
120
-
121
- def streamline_spaces_interface(config: Dict) -> Dict:
122
- """Streamlines the interface config dictionary to remove unnecessary keys."""
123
- config["inputs"] = [
124
- components.get_component_instance(component)
125
- for component in config["input_components"]
126
- ]
127
- config["outputs"] = [
128
- components.get_component_instance(component)
129
- for component in config["output_components"]
130
- ]
131
- parameters = {
132
- "article",
133
- "description",
134
- "flagging_options",
135
- "inputs",
136
- "outputs",
137
- "title",
138
- }
139
- config = {k: config[k] for k in parameters}
140
- return config
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/gradio/templates/cdn/assets/ColorPicker-5063dbc4.css DELETED
@@ -1 +0,0 @@
1
- label.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70{display:flex;align-items:center;cursor:pointer;color:var(--body-text-color);font-weight:var(--checkbox-label-text-weight);font-size:var(--checkbox-label-text-size);line-height:var(--line-md)}label.svelte-1ojmf70>.svelte-1ojmf70+.svelte-1ojmf70{margin-left:var(--size-2)}input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70{--ring-color:transparent;position:relative;box-shadow:var(--input-shadow);border:1px solid var(--checkbox-border-color);border-radius:var(--checkbox-border-radius);background-color:var(--checkbox-background-color);line-height:var(--line-sm)}input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70:checked,input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70:checked:hover,input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70:checked:focus{border-color:var(--checkbox-border-color-selected);background-image:var(--checkbox-check);background-color:var(--checkbox-background-color-selected)}input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70:hover{border-color:var(--checkbox-border-color-hover);background-color:var(--checkbox-background-color-hover)}input.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70:focus{border-color:var(--checkbox-border-color-focus);background-color:var(--checkbox-background-color-focus)}input[disabled].svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70,.disabled.svelte-1ojmf70.svelte-1ojmf70.svelte-1ojmf70{cursor:not-allowed}.wrap.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04{display:flex;flex-wrap:wrap;gap:var(--checkbox-label-gap)}label.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04{display:flex;align-items:center;transition:var(--button-transition);cursor:pointer;box-shadow:var(--checkbox-label-shadow);border:var(--checkbox-label-border-width) solid 
var(--checkbox-label-border-color);border-radius:var(--button-small-radius);background:var(--checkbox-label-background-fill);padding:var(--checkbox-label-padding);color:var(--checkbox-label-text-color);font-weight:var(--checkbox-label-text-weight);font-size:var(--checkbox-label-text-size);line-height:var(--line-md)}label.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:hover{background:var(--checkbox-label-background-fill-hover)}label.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:focus{background:var(--checkbox-label-background-fill-focus)}label.selected.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04{background:var(--checkbox-label-background-fill-selected);color:var(--checkbox-label-text-color-selected)}label.svelte-1qxcj04>.svelte-1qxcj04+.svelte-1qxcj04{margin-left:var(--size-2)}input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04{--ring-color:transparent;position:relative;box-shadow:var(--checkbox-shadow);border:var(--checkbox-border-width) solid var(--checkbox-border-color);border-radius:var(--checkbox-border-radius);background-color:var(--checkbox-background-color);line-height:var(--line-sm)}input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:checked,input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:checked:hover,input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:checked:focus{border-color:var(--checkbox-border-color-selected);background-image:var(--checkbox-check);background-color:var(--checkbox-background-color-selected)}input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:hover{border-color:var(--checkbox-border-color-hover);background-color:var(--checkbox-background-color-hover)}input.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04:focus{border-color:var(--checkbox-border-color-focus);background-color:var(--checkbox-background-color-focus)}input[disabled].svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04,.disabled.svelte-1qxcj04.svelte-1qxcj04.svelte-1qxcj04{cursor:not-allowed}.options.svelte-1aonegi{--window-padding:var(--size-8);position:fixed;z-index:var(--layer-top);marg
in-left:0;box-shadow:var(--shadow-drop-lg);border-radius:var(--container-radius);background:var(--background-fill-primary);min-width:fit-content;max-width:inherit;overflow:auto;color:var(--body-text-color);list-style:none}.item.svelte-1aonegi{display:flex;cursor:pointer;padding:var(--size-2)}.item.svelte-1aonegi:hover,.active.svelte-1aonegi{background:var(--background-fill-secondary)}.inner-item.svelte-1aonegi{padding-right:var(--size-1)}.hide.svelte-1aonegi{visibility:hidden}label.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0:not(.container),label.svelte-c0u3f0:not(.container) .wrap.svelte-c0u3f0.svelte-c0u3f0,label.svelte-c0u3f0:not(.container) .wrap-inner.svelte-c0u3f0.svelte-c0u3f0,label.svelte-c0u3f0:not(.container) .secondary-wrap.svelte-c0u3f0.svelte-c0u3f0,label.svelte-c0u3f0:not(.container) .token.svelte-c0u3f0.svelte-c0u3f0,label.svelte-c0u3f0:not(.container) input.svelte-c0u3f0.svelte-c0u3f0{height:100%}.container.svelte-c0u3f0 .wrap.svelte-c0u3f0.svelte-c0u3f0{box-shadow:var(--input-shadow);border:var(--input-border-width) solid var(--border-color-primary)}.wrap.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{position:relative;border-radius:var(--input-radius);background:var(--input-background-fill)}.wrap.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0:focus-within{box-shadow:var(--input-shadow-focus);border-color:var(--input-border-color-focus)}.wrap-inner.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{display:flex;position:relative;flex-wrap:wrap;align-items:center;gap:var(--checkbox-label-gap);padding:var(--checkbox-label-padding)}.token.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{display:flex;align-items:center;transition:var(--button-transition);cursor:pointer;box-shadow:var(--checkbox-label-shadow);border:var(--checkbox-label-border-width) solid 
var(--checkbox-label-border-color);border-radius:var(--button-small-radius);background:var(--checkbox-label-background-fill);padding:var(--checkbox-label-padding);color:var(--checkbox-label-text-color);font-weight:var(--checkbox-label-text-weight);font-size:var(--checkbox-label-text-size);line-height:var(--line-md)}.token.svelte-c0u3f0>.svelte-c0u3f0+.svelte-c0u3f0{margin-left:var(--size-2)}.token-remove.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{fill:var(--body-text-color);display:flex;justify-content:center;align-items:center;cursor:pointer;border:var(--checkbox-border-width) solid var(--border-color-primary);border-radius:var(--radius-full);background:var(--background-fill-primary);padding:var(--size-0-5);width:18px;height:18px}.secondary-wrap.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{display:flex;flex:1 1 0%;align-items:center;border:none;min-width:min-content}input.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{margin:var(--spacing-sm);outline:none;border:none;background:inherit;width:var(--size-full);color:var(--body-text-color);font-size:var(--input-text-size)}input.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0:disabled{-webkit-text-fill-color:var(--body-text-color);-webkit-opacity:1;opacity:1;cursor:not-allowed}.remove-all.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{margin-left:var(--size-1);width:20px;height:20px}.hide.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{display:none}.subdued.svelte-c0u3f0.svelte-c0u3f0.svelte-c0u3f0{color:var(--body-text-color-subdued)}label.svelte-gigvtq.svelte-gigvtq:not(.container),label.svelte-gigvtq:not(.container)>input.svelte-gigvtq{height:100%;border:none}.container.svelte-gigvtq>input.svelte-gigvtq{border:var(--input-border-width) solid 
var(--input-border-color);border-radius:var(--input-radius)}input[type=number].svelte-gigvtq.svelte-gigvtq{display:block;position:relative;outline:none!important;box-shadow:var(--input-shadow);background:var(--input-background-fill);padding:var(--input-padding);width:100%;color:var(--body-text-color);font-size:var(--input-text-size);line-height:var(--line-sm)}input.svelte-gigvtq.svelte-gigvtq:disabled{-webkit-text-fill-color:var(--body-text-color);-webkit-opacity:1;opacity:1}input.svelte-gigvtq.svelte-gigvtq:focus{box-shadow:var(--input-shadow-focus);border-color:var(--input-border-color-focus)}input.svelte-gigvtq.svelte-gigvtq::placeholder{color:var(--input-placeholder-color)}input.svelte-gigvtq.svelte-gigvtq:out-of-range{border:var(--input-border-width) solid var(--error-border-color)}.wrap.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt{display:flex;flex-wrap:wrap;gap:var(--checkbox-label-gap)}label.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt{display:flex;align-items:center;transition:var(--button-transition);cursor:pointer;box-shadow:var(--checkbox-label-shadow);border:var(--checkbox-label-border-width) solid 
var(--checkbox-label-border-color);border-radius:var(--button-small-radius);background:var(--checkbox-label-background-fill);padding:var(--checkbox-label-padding);color:var(--checkbox-label-text-color);font-weight:var(--checkbox-label-text-weight);font-size:var(--checkbox-label-text-size);line-height:var(--line-md)}label.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:hover{background:var(--checkbox-label-background-fill-hover)}label.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:focus{background:var(--checkbox-label-background-fill-focus)}label.selected.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt{background:var(--checkbox-label-background-fill-selected);color:var(--checkbox-label-text-color-selected)}label.svelte-1p9xokt>.svelte-1p9xokt+.svelte-1p9xokt{margin-left:var(--size-2)}input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt{--ring-color:transparent;position:relative;box-shadow:var(--checkbox-shadow);border:var(--checkbox-border-width) solid var(--checkbox-border-color);border-radius:var(--radius-full);background-color:var(--checkbox-background-color);line-height:var(--line-sm)}input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:checked,input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:checked:hover,input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:checked:focus{border-color:var(--checkbox-border-color-selected);background-image:var(--radio-circle);background-color:var(--checkbox-background-color-selected)}input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:hover{border-color:var(--checkbox-border-color-hover);background-color:var(--checkbox-background-color-hover)}input.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt:focus{border-color:var(--checkbox-border-color-focus);background-color:var(--checkbox-background-color-focus)}input[disabled].svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt,.disabled.svelte-1p9xokt.svelte-1p9xokt.svelte-1p9xokt{cursor:not-allowed}label.svelte-1kcgrqr.svelte-1kcgrqr{display:block;width:100%}input.svelte-1kcgrqr.svelte-1kcgrqr,textarea.svelte
-1kcgrqr.svelte-1kcgrqr{display:block;position:relative;outline:none!important;box-shadow:var(--input-shadow);background:var(--input-background-fill);padding:var(--input-padding);width:100%;color:var(--body-text-color);font-weight:var(--input-text-weight);font-size:var(--input-text-size);line-height:var(--line-sm);border:none}label.svelte-1kcgrqr.svelte-1kcgrqr:not(.container),label.svelte-1kcgrqr:not(.container)>input.svelte-1kcgrqr,label.svelte-1kcgrqr:not(.container)>textarea.svelte-1kcgrqr{height:100%}.container.svelte-1kcgrqr>input.svelte-1kcgrqr,.container.svelte-1kcgrqr>textarea.svelte-1kcgrqr{border:var(--input-border-width) solid var(--input-border-color);border-radius:var(--input-radius)}input.svelte-1kcgrqr.svelte-1kcgrqr:disabled,textarea.svelte-1kcgrqr.svelte-1kcgrqr:disabled{-webkit-text-fill-color:var(--body-text-color);-webkit-opacity:1;opacity:1}input.svelte-1kcgrqr.svelte-1kcgrqr:focus,textarea.svelte-1kcgrqr.svelte-1kcgrqr:focus{box-shadow:var(--input-shadow-focus);border-color:var(--input-border-color-focus)}input.svelte-1kcgrqr.svelte-1kcgrqr::placeholder,textarea.svelte-1kcgrqr.svelte-1kcgrqr::placeholder{color:var(--input-placeholder-color)}button.svelte-1kcgrqr.svelte-1kcgrqr{display:flex;position:absolute;top:var(--block-label-margin);right:var(--block-label-margin);align-items:center;box-shadow:var(--shadow-drop);border:1px solid var(--color-border-primary);border-top:none;border-right:none;border-radius:var(--block-label-right-radius);background:var(--block-label-background-fill);padding:5px;width:22px;height:22px;overflow:hidden;color:var(--block-label-color);font:var(--font-sans);font-size:var(--button-small-text-size)}.wrap.svelte-1cl284s{display:flex;flex-direction:column;width:100%}.head.svelte-1cl284s{display:flex;justify-content:space-between}input[type=number].svelte-1cl284s{display:block;position:relative;outline:none!important;box-shadow:var(--input-shadow);border:var(--input-border-width) solid 
var(--input-border-color);border-radius:var(--input-radius);background:var(--input-background-fill);padding:var(--size-2) var(--size-2);height:var(--size-6);color:var(--body-text-color);font-size:var(--input-text-size);line-height:var(--line-sm);text-align:center}input.svelte-1cl284s:disabled{-webkit-text-fill-color:var(--body-text-color);-webkit-opacity:1;opacity:1}input[type=number].svelte-1cl284s:focus{box-shadow:var(--input-shadow-focus);border-color:var(--input-border-color-focus)}input.svelte-1cl284s::placeholder{color:var(--input-placeholder-color)}input[type=range].svelte-1cl284s{width:100%;accent-color:var(--slider-color)}input[disabled].svelte-1cl284s{cursor:not-allowed}input.svelte-56zyyb{display:block;position:relative;background:var(--background-fill-primary);line-height:var(--line-sm)}