parquet-converter commited on
Commit
8ba65d9
·
1 Parent(s): d3d5d2b

Update parquet files (step 28 of 296)

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Assassins Creed Iii 103 Skidrow Patch Everything You Need to Know About the Latest Version of the Game.md +0 -136
  2. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Cracked Dc Unlocker Unlimited Credits New Versionl The Latest and Most Powerful Version of DC-Unlocker.md +0 -163
  3. spaces/1acneusushi/gradio-2dmoleculeeditor/data/Fisiologiaanimalhill.md +0 -35
  4. spaces/1gistliPinn/ChatGPT4/Examples/Boeing 737-300 500 CBT - Lufthansa Full Versionl !!HOT!!.md +0 -6
  5. spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/2020 Design 12 The Most Trusted Software for Kitchen and Bathroom Designers.md +0 -186
  6. spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/APKPure 3 The Android App Store that Saves You Time Space and Data.md +0 -124
  7. spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Best Ways to Download YouTube Videos Reddit Users Recommend.md +0 -183
  8. spaces/1phancelerku/anime-remove-background/Bitcoin Mining Simulator Idle Clicker Tycoon Mod APK.md +0 -162
  9. spaces/1phancelerku/anime-remove-background/Download Index of Cricket League Mod APK v1.0.5 for Android - Unlimited Coins and Gems.md +0 -124
  10. spaces/4Taps/SadTalker/src/facerender/modules/generator.py +0 -251
  11. spaces/801artistry/RVC801/demucs/test.py +0 -109
  12. spaces/AI-Dashboards/HEDIS.Assessment.PHQ9.GADD7.SDoH/style.css +0 -28
  13. spaces/AIGC-Audio/AudioGPT/text_to_speech/tasks/tts/synta.py +0 -25
  14. spaces/AIGC-Audio/AudioGPT/text_to_speech/utils/audio/io.py +0 -22
  15. spaces/AIGC-Audio/Make_An_Audio/ldm/modules/encoders/open_clap/transform.py +0 -30
  16. spaces/AIWaves/SOP_Generation-single/Component/__init__.py +0 -3
  17. spaces/AdvertisingAgency/README/README.md +0 -10
  18. spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/buildarcadeobject.d.ts +0 -2
  19. spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/spinner/clock/Factory.d.ts +0 -6
  20. spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/lineprogress/LineProgress.d.ts +0 -2
  21. spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/.github/ISSUE_TEMPLATE/feature_request.md +0 -20
  22. spaces/Andy1621/uniformer_image_detection/mmdet/models/dense_heads/__init__.py +0 -41
  23. spaces/Andy1621/uniformer_image_segmentation/configs/danet/danet_r101-d8_512x1024_40k_cityscapes.py +0 -2
  24. spaces/Andy1621/uniformer_image_segmentation/configs/encnet/encnet_r50-d8_512x512_160k_ade20k.py +0 -6
  25. spaces/Andyrasika/Andyrasika-dreamshaper-sdxl-1.0/app.py +0 -3
  26. spaces/AnimalEquality/chatbot/_proc/_docs/site_libs/quarto-search/autocomplete.umd.js +0 -3
  27. spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/openai/models.py +0 -78
  28. spaces/Anonymous-sub/Rerender/ControlNet/annotator/uniformer/mmcv/cnn/utils/fuse_conv_bn.py +0 -59
  29. spaces/Anustup/NS_AI_LABS/tests/segments_test.py +0 -48
  30. spaces/ArtGAN/Diffusion-API/diffusion_webui/utils/__init__.py +0 -0
  31. spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py +0 -104
  32. spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/__init__.py +0 -599
  33. spaces/Awiny/Image2Paragraph/models/grit_src/third_party/CenterNet2/detectron2/modeling/roi_heads/fast_rcnn.py +0 -462
  34. spaces/BAAI/vid2vid-zero/vid2vid_zero/p2p/seq_aligner.py +0 -197
  35. spaces/Bala2-03-2003/MygenvioceAI/README.md +0 -12
  36. spaces/BasToTheMax/tensor/app.py +0 -28
  37. spaces/Benson/text-generation/Examples/Cmo Descargar Mis Monstruos Cantando.md +0 -79
  38. spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_internal/utils/egg_link.py +0 -72
  39. spaces/CVPR/LIVE/diffvg.cpp +0 -1792
  40. spaces/CVPR/LIVE/thrust/thrust/detail/complex/stream.h +0 -71
  41. spaces/CVPR/LIVE/thrust/thrust/system/omp/pointer.h +0 -360
  42. spaces/CVPR/MonoScene/monoscene/unet3d_nyu.py +0 -90
  43. spaces/CikeyQI/Yunzai/Yunzai/plugins/ws-plugin/resources/admin/index.html +0 -42
  44. spaces/ClearLove443/Robby-chatbot/pages/1_📄Robby-Chat.py +0 -100
  45. spaces/CoPoBio/skin_cancer_risk_prediction/app_DCCPH.py +0 -225
  46. spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/utils/__init__.py +0 -0
  47. spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/utils/imports.py +0 -23
  48. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/ImageMath.py +0 -263
  49. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/PngImagePlugin.py +0 -1456
  50. spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/huggingface_hub/utils/_git_credential.py +0 -96
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Assassins Creed Iii 103 Skidrow Patch Everything You Need to Know About the Latest Version of the Game.md DELETED
@@ -1,136 +0,0 @@
1
-
2
- <h1>Assassins Creed III 103 Skidrow Patch: Everything You Need to Know</h1>
3
- <p>If you are a fan of the Assassins Creed series, you might be interested in playing the third installment of the franchise, Assassins Creed III. This game takes you to the American Revolution era, where you can explore the historical events and locations, as well as engage in stealth, combat, and parkour. However, if you want to play this game on your PC, you might encounter some issues and bugs that can ruin your gaming experience. That's why you might want to use the Assassins Creed III 103 Skidrow Patch, which is a crack and update for the game that fixes many problems and adds new features. In this article, we will tell you everything you need to know about this patch, including what it is, how to download and install it, and how to fix common issues and errors with it. Let's get started!</p>
4
- <h2>Assassins Creed Iii 103 Skidrow Patch</h2><br /><p><b><b>Download</b> ---> <a href="https://byltly.com/2uKwc2">https://byltly.com/2uKwc2</a></b></p><br /><br />
5
- <h2>What is Assassins Creed III?</h2>
6
- <h3>A brief overview of the game's story and gameplay</h3>
7
- <p>Assassins Creed III is an action-adventure game developed by Ubisoft Montreal and published by Ubisoft in 2012. It is the fifth main game in the Assassins Creed series, and a sequel to Assassins Creed: Revelations. The game follows the story of Desmond Miles, a modern-day assassin who relives the memories of his ancestors through a device called the Animus. In this game, Desmond accesses the memories of Ratonhnhaké:ton, also known as Connor, a half-English, half-Mohawk assassin who fights against the Templars during the American Revolution. The game features an open-world environment that spans various locations in colonial America, such as Boston, New York, the Frontier, and the Caribbean Sea. The game also introduces naval combat, hunting, crafting, and homestead management as new gameplay elements.</p>
8
- <h3>The main features and improvements of Assassins Creed III</h3>
9
- <p>Assassins Creed III is considered one of the most ambitious and innovative games in the series, as it offers many new features and improvements over its predecessors. Some of these features are:</p>
10
- <ul>
11
- <li>A new engine: The game uses a new engine called Anvil Next, which allows for more realistic graphics, animations, physics, weather effects, and crowd behavior.</li>
12
- <li>A new protagonist: The game introduces a new protagonist, Connor, who has a unique fighting style that combines tomahawks, bows, pistols, rope darts, and hidden blades. Connor also has access to various outfits and weapons that reflect his Native American heritage.</li>
13
- <li>A new setting: The game explores a new historical period, the American Revolution, which offers a rich and diverse backdrop for the story. The game also features historical figures such as George Washington, Benjamin Franklin, Thomas Jefferson, Samuel Adams, Paul Revere, Charles Lee, and more.</li>
14
- <li>A new gameplay mode: The game features a multiplayer mode that allows players to compete against each other in various modes such as deathmatch, domination, wolfpack, manhunt, artifact assault, and more. The multiplayer mode also has a story mode that reveals more about the Templars' plans.</li>
15
- </ul>
16
- <h2>What is Skidrow?</h2>
17
- <h3>A brief history and background of Skidrow group</h3>
18
- <p>Skidrow is a group of hackers and crackers who specialize in cracking and releasing games for PC. They are one of the most popular and notorious groups in the scene, as they have cracked hundreds of games since their inception in 1990. Some of their most famous releases include Grand Theft Auto V, The Witcher 3: Wild Hunt, Far Cry 5, and Red Dead Redemption 2. Skidrow is also known for their rivalry with other groups such as Reloaded, Codex, and CPY.</p>
19
- <p>Assassins Creed 3 update 1.03 skidrow crack<br />
20
- How to install Assassins Creed III skidrow patch 103<br />
21
- Assassins Creed III version 1.03 skidrow download<br />
22
- Assassins Creed 3 skidrow patch 103 fix<br />
23
- Assassins Creed III skidrow update 1.03 error<br />
24
- Download Assassins Creed 3 patch 1.03 skidrow free<br />
25
- Assassins Creed III skidrow patch 103 not working<br />
26
- Assassins Creed 3 update 1.03 skidrow torrent<br />
27
- Assassins Creed III skidrow patch 103 changelog<br />
28
- Assassins Creed 3 skidrow patch 103 gameplay<br />
29
- Assassins Creed III version 1.03 skidrow trainer<br />
30
- Assassins Creed 3 skidrow patch 103 lag<br />
31
- Assassins Creed III skidrow update 1.03 features<br />
32
- Assassins Creed 3 patch 1.03 skidrow size<br />
33
- Assassins Creed III skidrow patch 103 requirements<br />
34
- Assassins Creed 3 skidrow patch 103 mods<br />
35
- Assassins Creed III version 1.03 skidrow cheats<br />
36
- Assassins Creed 3 update 1.03 skidrow review<br />
37
- Assassins Creed III skidrow patch 103 steam<br />
38
- Assassins Creed 3 skidrow patch 103 save game<br />
39
- Assassins Creed III skidrow update 1.03 release date<br />
40
- Assassins Creed 3 patch 1.03 skidrow keygen<br />
41
- Assassins Creed III skidrow patch 103 multiplayer<br />
42
- Assassins Creed 3 skidrow patch 103 graphics<br />
43
- Assassins Creed III version 1.03 skidrow system requirements<br />
44
- Assassins Creed 3 update 1.03 skidrow repack<br />
45
- Assassins Creed III skidrow patch 103 dlc<br />
46
- Assassins Creed 3 skidrow patch 103 bugs<br />
47
- Assassins Creed III skidrow update 1.03 performance<br />
48
- Assassins Creed 3 patch 1.03 skidrow iso<br />
49
- Assassins Creed III skidrow patch 103 sound<br />
50
- Assassins Creed 3 skidrow patch 103 resolution<br />
51
- Assassins Creed III version 1.03 skidrow comparison<br />
52
- Assassins Creed 3 update 1.03 skidrow rar password<br />
53
- Assassins Creed III skidrow patch 103 achievements<br />
54
- Assassins Creed 3 skidrow patch 103 missions<br />
55
- Assassins Creed III skidrow update 1.03 unlocker<br />
56
- Assassins Creed 3 patch 1.03 skidrow direct link<br />
57
- Assassins Creed III skidrow patch 103 characters<br />
58
- Assassins Creed 3 skidrow patch 103 settings<br />
59
- Assassins Creed III version 1.03 skidrow crack only<br />
60
- Assassins Creed 3 update 1.03 skidrow megaupload<br />
61
- Assassins Creed III skidrow patch 103 screenshots<br />
62
- Assassins Creed 3 skidrow patch 103 video<br />
63
- Assassins Creed III skidrow update 1.03 guide<br />
64
- Assassins Creed iii patch v1.03 with theta crack download free full pc game torrent cracked by reloaded and blackbox repack working link no survey no password no virus no malware no adfly no survey no password no virus no malware no adfly no survey no password no virus no malware no adfly no survey no password no virus no malware no adfly</p>
65
- <h3>The benefits and risks of using Skidrow cracks and patches</h3>
66
- <p>Using Skidrow cracks and patches can have some benefits and risks for PC gamers. Some of the benefits are:</p>
67
- <ul>
68
- <li>You can play games for free without buying them.</li>
69
- <li>You can play games without DRM (digital rights management) restrictions or online activation.</li>
70
- <li>You can play games before their official release date or in regions where they are not available.</li>
71
- <li>You can play games with mods or cheats that are not supported by the official version.</li>
72
- </ul>
73
- <p>Some of the risks are:</p>
74
- <ul>
75
- <li>You can expose your PC to viruses or malware that can harm your system or steal your data.</li>
76
- <li>You can face legal consequences if you are caught downloading or distributing pirated games.</li>
77
- <li>You can miss out on updates or patches that fix bugs or add new content to the games.</li>
78
- <li>You can experience compatibility or performance issues with some games or hardware.</li>
79
- <li>You can lose access to online features or multiplayer modes that require an official account or connection.</li>
80
- </ul>
81
- <h2>What is Assassins Creed III 103 Skidrow Patch?</h2>
82
- <h3>A detailed description of the patch and its contents</h3>
83
- <p>Assassins Creed III 103 Skidrow Patch is a crack and update for Assassins Creed III that was released by Skidrow in 2013. It is also known as Assassins Creed III Update v1.03 + Crack Only Proper-Reloaded. This patch fixes many bugs and glitches that were present in the original version of the game, such as:</p>
84
- <ul>
85
- <li>Crashes or freezes during gameplay or cutscenes.</li>
86
- <li>Audio or subtitle synchronization issues.</li>
87
- <li>Missing textures or models.</li>
88
- <li>Incorrect animations or movements.</li>
89
- <li>Broken quests or objectives.</li>
90
- <li>Infinite loading screens or black screens.</li>
91
- </ul>
92
- <p>This patch also adds some new features and improvements to the game, such as:</p>
93
- <ul>
94
- <li>A new difficulty level: Nightmare Mode.</li>
95
- <li>A new multiplayer map: Saint Pierre.</li>
96
- <li>A new multiplayer character: The Siren.</li>
97
- <li>A new single-player mission: The Tyranny of King Washington - The Infamy (Part 1).</li>
98
- <li>A new single-player outfit: The Captain Kidd's Outfit.</li>
99
- <li>A new single-player weapon: The Sawtooth Sword.</li>
100
- </ul>
101
- <h3>How to download and install the patch correctly</h3>
102
- <p>To download and install Assassins Creed III 103 Skidrow Patch correctly, you need to follow these steps:</p>
103
- <ol type="1">
104
- <li>Download Assassins Creed III 103 Skidrow Patch from a reliable source such as <a href="https://www.skidrowreloaded.com/assassins-creed-iii-update-v1-03-crack-only-proper-reloaded/">Skidrow Reloaded</a>.</li>
105
- <li>Extract the files from the downloaded archive using a program such as WinRAR or 7-Zip.</li>
106
- <li>Copy all the files from the Crack folder to your Assassins Creed III installation folder (usually C:\Program Files (x86)\Ubisoft\Assassin's Creed III).</li>
107
- <li>Run AC3SP.exe as administrator to start playing the game with the patch applied.</li>
108
- </ol>
109
- <h2>How to fix common issues and errors with Assassins Creed III 103 Skidrow Patch?</h2>
110
- <h3>A list of possible problems and solutions for the patch users</h3>
111
- <p>If you encounter any issues or errors while using Assassins Creed III 103 Skidrow Patch, you can try some of these possible solutions:</p>
112
- <table border="1">
113
- <tr><th>Problem</th><th>Solution</th></tr>
114
- <tr><td>The game does not start or crashes at launch.</td><td>- Make sure your PC meets the minimum system requirements for Assassins Creed III.<br>- Make sure you have installed all the necessary drivers for your graphics card.<br>- Make sure you have disabled any antivirus or firewall programs that might interfere with - Try to run the game in compatibility mode for Windows 7 or 8.<br>- Try to update or reinstall DirectX and Microsoft Visual C++ Redistributable.<br>- Try to delete or rename the file AC3SP.ini in your installation folder.</td></tr>
115
- <tr><td>The game runs slowly or lags during gameplay.</td><td>- Make sure your PC meets the recommended system requirements for Assassins Creed III.<br>- Make sure you have adjusted the graphics settings to suit your PC's capabilities.<br>- Make sure you have closed any background programs that might consume your CPU or RAM.<br>- Make sure you have defragmented your hard drive and cleaned your registry.<br>- Try to lower the resolution or disable some effects such as anti-aliasing, shadows, or reflections.</td></tr>
116
- <tr><td>The game does not save or load properly.</td><td>- Make sure you have enough free space on your hard drive.<br>- Make sure you have not modified or deleted any game files.<br>- Make sure you have backed up your save files before applying the patch.<br>- Make sure you have run AC3SP.exe as administrator.<br>- Try to delete or rename the folder Ubisoft Game Launcher in C:\Program Files (x86)\Ubisoft.</td></tr>
117
- <tr><td>The game does not connect to the internet or multiplayer mode.</td><td>- Make sure you have a stable and fast internet connection.<br>- Make sure you have allowed the game through your firewall or router settings.<br>- Make sure you have updated your game to the latest version.<br>- Make sure you have created and logged in to a Ubisoft account.<br>- Try to use a VPN or proxy service to bypass any regional restrictions.</td></tr>
118
- <tr><td>The game shows an error message such as "AC3SP.exe has stopped working" or "Ubisoft Game Launcher error code 2".</td><td>- Make sure you have followed all the steps in the previous solutions.<br>- Make sure you have downloaded and installed the patch from a trusted source.<br>- Make sure you have copied all the files from the Crack folder correctly.<br>- Try to reinstall the game and the patch from scratch.<br>- Try to contact Skidrow for support and feedback.</td></tr>
119
- </table>
120
- <h3>How to contact Skidrow for support and feedback</h3>
121
- <p>If none of the solutions above work for you, or if you have any questions, suggestions, or feedback for Skidrow, you can try to contact them through their official website, <a href="https://www.skidrowreloaded.com/">Skidrow Reloaded</a>. There, you can find more information about their releases, updates, news, and comments. You can also join their community and chat with other users who might have similar issues or interests. However, be aware that Skidrow is not an official source of support for Assassins Creed III, and they might not respond to your messages or requests. Therefore, use their services at your own risk and discretion.</p>
122
- <h2>Conclusion</h2>
123
- <h3>A summary of the main points and a call to action for the readers</h3>
124
- <p>Assassins Creed III 103 Skidrow Patch is a crack and update for Assassins Creed III that fixes many bugs and glitches and adds new features and improvements to the game. It is a great way to enjoy one of the best games in the Assassins Creed series without spending any money or facing any restrictions. However, it also comes with some risks and challenges that might affect your PC's security or performance. Therefore, before using this patch, make sure you know what you are doing and follow the instructions carefully. If you encounter any problems or errors with this patch, try some of the solutions we provided above, or contact Skidrow for support and feedback. We hope this article was helpful and informative for you. If you liked it, please share it with your friends and fellow gamers. And if you want to play more games like Assassins Creed III, check out our website for more cracks and patches from Skidrow. Thank you for reading!</p>
125
- <h4>FAQs</h4>
126
- <p>Here are some frequently asked questions about Assassins Creed III 103 Skidrow Patch:</p>
127
- <ol type="1">
128
- <li>Q: Do I need to have Assassins Creed III installed before applying this patch?<br>A: Yes, you need to have Assassins Creed III installed on your PC before applying this patch. You can download Assassins Creed III from <a href="https://www.skidrowreloaded.com/assassins-creed-iii-skidrow/">Skidrow Reloaded</a>.</li>
129
- <li>Q: Do I need to apply any previous patches before applying this patch?<br>A: No, you do not need to apply any previous patches before applying this patch. This patch includes all the previous updates and fixes for Assassins Creed III.</li>
130
- <li>Q: Does this patch work with Steam or Uplay versions of Assassins Creed III?<br>A: No, this patch only works with Skidrow version of Assassins Creed III. If you have Steam or Uplay versions of Assassins Creed III, you need to uninstall them and install Skidrow version instead.</li>
131
- <li>Q: Does this patch include any DLCs (downloadable content) for Assassins Creed III?<br>A: Yes, this patch includes one DLC for Assassins Creed III: The Tyranny of King Washington - The Infamy (Part 1). This is a single-player mission that explores an alternate history where George Washington becomes a tyrant. You can access this mission from the main menu of the game.</li>
132
- <li>Q: Can I play online or multiplayer mode with this patch?<br>A: Yes, you can play online or multiplayer mode with this patch. However, you need to create and log in to a Ubisoft account first. You also need to allow the game through your firewall or router settings. You might also face some lag or connection issues depending on your internet speed and location.</li>
133
- </ol>
134
- </p> 0a6ba089eb<br />
135
- <br />
136
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Cracked Dc Unlocker Unlimited Credits New Versionl The Latest and Most Powerful Version of DC-Unlocker.md DELETED
@@ -1,163 +0,0 @@
1
-
2
- <h1>Cracked DC Unlocker Unlimited Credits New Version</h1>
3
- <p>Do you want to unlock your modem, router, or phone without paying for credits or waiting for hours? If yes, then you might be interested in Cracked DC Unlocker Unlimited Credits New Version. This is a software that allows you to bypass the limitations of the official DC Unlocker and use it for free and unlimited. But what is DC Unlocker and how does it work? And what are the advantages and disadvantages of using the cracked version? In this article, we will answer these questions and more. We will also show you how to download, install, and use Cracked DC Unlocker Unlimited Credits New Version on your device. So, let's get started!</p>
4
- <h2>Cracked Dc Unlocker Unlimited Credits New Versionl</h2><br /><p><b><b>DOWNLOAD</b> &#10145; <a href="https://byltly.com/2uKwc3">https://byltly.com/2uKwc3</a></b></p><br /><br />
5
- <h2>What is DC Unlocker?</h2>
6
- <p>DC Unlocker is a software that helps you to unlock various devices such as modems, routers, phones, and dongles from different brands and models. It supports over 6000 devices from Huawei, ZTE, LG, Nokia, Alcatel, Sony, Lenovo, Xiaomi, and more. It can also repair IMEI, firmware, bootloaders, NVM, security area, etc. It is one of the most popular and reliable unlocking tools in the market.</p>
7
- <h3>A brief introduction to DC Unlocker and its features</h3>
8
- <p>DC Unlocker was created in 2004 by a team of professionals who wanted to provide a fast and easy solution for unlocking devices. It works by connecting your device to your PC via USB cable and detecting its information automatically. Then, it generates an unlock code or performs a direct unlock depending on the device model. It also has an online server that updates the software regularly with new models and features.</p>
9
- <p>Some of the main features of DC Unlocker are:</p>
10
- <p>Cracked Dc Unlocker Software Free Download<br />
11
- Dc Unlocker Patched with Unlimited Credits 2017<br />
12
- Dc Unlocker Client Software V1.00.0565 Cracked<br />
13
- How to use Dc Unlocker Cracked Version<br />
14
- Dc Unlocker Free Read Bootloader Huawei Phones<br />
15
- Dc Unlocker Free Unlock Huawei Modems<br />
16
- Dc Unlocker Free Write Firmware Huawei Modems<br />
17
- Dc Unlocker Cracked Username and Password<br />
18
- Dc Unlocker Cracked for Qualcomm and Hisilicon Devices<br />
19
- Dc Unlocker Cracked Latest Version Download<br />
20
- Dc Unlocker Cracked No Sign in Support<br />
21
- Dc Unlocker Cracked No Credits Required<br />
22
- Dc Unlocker Cracked Tested and Working<br />
23
- Dc Unlocker Cracked XDA Forums<br />
24
- Dc Unlocker Cracked News Updates and Guides<br />
25
- Dc Unlocker Cracked for Different Provider SIM<br />
26
- Dc Unlocker Cracked RAR File Download<br />
27
- Dc Unlocker Cracked dccrap.exe Download<br />
28
- Dc Unlocker Cracked for Huawei Smart Phones<br />
29
- Dc Unlocker Cracked for Huawei Modems<br />
30
- How to Install Dc Unlocker Cracked Version<br />
31
- How to Update Dc Unlocker Cracked Version<br />
32
- How to Fix Errors in Dc Unlocker Cracked Version<br />
33
- How to Get Free Credits in Dc Unlocker Cracked Version<br />
34
- How to Bypass Login in Dc Unlocker Cracked Version<br />
35
- How to Change Language in Dc Unlocker Cracked Version<br />
36
- How to Support New Models in Dc Unlocker Cracked Version<br />
37
- How to Reset Counter in Dc Unlocker Cracked Version<br />
38
- How to Repair IMEI in Dc Unlocker Cracked Version<br />
39
- How to Backup and Restore Data in Dc Unlocker Cracked Version<br />
40
- How to Enable Voice Feature in Dc Unlocker Cracked Version<br />
41
- How to Flash Custom Firmware in Dc Unlocker Cracked Version<br />
42
- How to Remove FRP Lock in Dc Unlocker Cracked Version<br />
43
- How to Root and Unroot Devices in Dc Unlocker Cracked Version<br />
44
- How to Generate Code from IMEI in Dc Unlocker Cracked Version<br />
45
- How to Calculate Hash Code in Dc Unlocker Cracked Version<br />
46
- How to Detect Device Automatically in Dc Unlocker Cracked Version<br />
47
- How to Select COM Port Manually in Dc Unlocker Cracked Version<br />
48
- How to Scan for Available Ports in Dc Unlocker Cracked Version<br />
49
- How to Check Device Information in Dc Unlocker Cracked Version<br />
50
- How to Check Device Status in Dc Unlocker Cracked Version<br />
51
- How to Check Device Firmware Version in Dc Unlocker Cracked Version<br />
52
- How to Check Device Hardware Version in Dc Unlocker Cracked Version<br />
53
- How to Check Device Security Area Backup in Dc Unlocker Cracked Version<br />
54
- How to Check Device NV Items Backup in Dc Unlocker Cracked Version <br />
55
- How to Check Device SIM Lock Status in Dc Unlocker Cracked Version <br />
56
- How to Check Device Network Lock Status in Dc Unlocker Cracked Version <br />
57
- How to Check Device Bootloader Lock Status in Dc Unlocker Cracked Version <br />
58
- How to Check Device Warranty Status in Dc Unlocker Cracked Version</p>
59
- <ul>
60
- <li>It can unlock devices in a few seconds or minutes without requiring any technical skills or knowledge.</li>
61
- <li>It can unlock devices that are not supported by other tools or methods.</li>
62
- <li>It can unlock devices that are hard-locked or have a counter blocked by too many wrong codes.</li>
63
- <li>It can unlock devices that have customized firmware or software versions.</li>
64
- <li>It can unlock devices that have different SIM card sizes or types.</li>
65
- <li>It can unlock devices from any network or country.</li>
66
- </ul>
67
- <h3>How to use DC Unlocker to unlock modems, routers, and phones</h3>
68
- <p>To use DC Unlocker to unlock your device, you need to follow these steps:</p>
69
- <ol>
70
- <li>Download and install DC Unlocker on your PC from the official website: https://www.dc-unlocker.com/</li>
71
- <li>Buy credits from the website or from a reseller. You need credits to perform unlocking operations with DC Unlocker. The price of credits depends on the device model and the number of credits required. You can check the price list here: https://www.dc-unlocker.com/buy/user_prices</li>
72
- <li>Connect your device to your PC via USB cable. Make sure you have installed the drivers for your device on your PC. You can find the drivers here: https://www.dc-unlocker.com/downloads/drivers</li>
73
- <li>Run DC Unlocker as administrator and click on the magnifying glass icon to detect your device.</li>
74
- <li>Select your device model from the drop-down menu or leave it as auto-detect.</li>
75
- <li>Click on the unlocking tab and choose either unlock or read unlock code depending on your device model.</li>
76
- <li>Wait for a few seconds or minutes until the process is completed.</li>
77
- <li>Disconnect your device from your PC and insert a different SIM card.</li>
78
- <li>Enjoy your unlocked device!</li>
79
- </ol>
80
- <h2>What is Cracked DC Unlocker Unlimited Credits?</h2>
81
- <p>Cracked DC Unlocker Unlimited Credits is a modified version of DC Unlocker that allows you to use it without paying for credits or registering an account. It also gives you unlimited credits so you can unlock as many devices as you want. It is created by hackers who crack the original software and bypass its security features.</p>
82
- <h3>The difference between the official and the cracked version of DC Unlocker</h3>
83
- <p>The main difference between the official and the cracked version of DC Unlocker is that the official version is legal and safe while the cracked version is illegal and risky. The official version is supported by the developers who update it regularly with new models and features. It also has a customer service that can help you with any issues or questions. The cracked version is not supported by anyone and may contain viruses or malware that can harm your PC or device. It also may not work properly or at all with some models or versions.</p>
84
- <h3>The benefits and risks of using Cracked DC Unlocker Unlimited Credits</h3>
85
- <p>The benefits of using Cracked DC Unlocker Unlimited Credits are:</p>
86
- <ul>
87
- <li>You can save money by not paying for credits or subscriptions.</li>
88
- <li>You can unlock unlimited devices without any restrictions or limitations.</li>
89
- <li>You can unlock devices that are not supported by the official version.</li>
90
- </ul>
91
- <p>The risks of using Cracked DC Unlocker Unlimited Credits are:</p>
92
- <ul>
93
- <li>You may violate the terms and conditions of DC Unlocker and face legal consequences.</li>
94
- <li>You may damage your PC or device by installing viruses or malware from unknown sources.</li>
95
- <li>You may lose your warranty or support from your device manufacturer or network provider.</li>
96
- <li>You may brick your device by using incompatible or outdated software versions.</li>
97
- <li>You may lose your data or personal information by exposing them to hackers or third parties.</li>
98
- </ul>
99
- <h3>Where to download Cracked DC Unlocker Unlimited Credits New Version</h3>
100
- <p>If you still want to download Cracked DC Unlocker Unlimited Credits New Version despite its risks, you can find it on various websites that offer cracked software. However, we do not recommend or endorse any of these websites as they may contain harmful content or links. Use them at your own risk and discretion. Some examples of these websites are:</p>
101
- <table border="1">
102
- <tr><th>Name</th><th>URL</th></tr>
103
- <tr><td>GSM X Team</td><td>https://gsmxteam.net/dc-unlocker-crack/</td></tr>
104
- <tr><td>GSM Forum</td><td>https://forum.gsmhosting.com/vbb/f1000/dc-unlocker-2-client-1-00-1431-crack-2020-a-2848418/</td></tr>
105
- <tr><td>GSM Crack Tools</td><td>https://gsmcracktools.com/dc-unlocker-crack/</td></tr>
106
- <tr><td>GSM Box Crack</td><td>https://gsmboxcrack.com/dc-unlocker-crack/</td></tr>
107
- <tr><td>GSM Flash Tool</td><td>https://gsmflashtool.com/dc-unlocker-crack/</td></tr>
108
- </table>
109
- <h2>How to install and use Cracked DC Unlocker Unlimited Credits New Version</h2>
110
- <h3>The system requirements and compatibility of Cracked DC Unlocker Unlimited Credits New Version</h3>
111
- <p>To install and use Cracked DC Unlocker Unlimited Credits New Version on your PC, you need to have:</p>
112
- <ul>
113
- <li>A Windows operating system (XP/Vista/7/8/10)</li>
114
- <li>A minimum of 512 MB RAM</li>
115
- <li>A minimum of 100 MB free disk space</li>
116
- <li>A USB port and cable for connecting your device</li>
117
- <li>A compatible device model (check the list here: https://www.dc-unlocker.com/supported_models_1)</li>
118
- </ul>
119
- <h3>The step-by-step guide to install and use Cracked DC Unlocker Unlimited Credits New Version</h3>
120
- <p>To install and use Cracked DC Unlocker Unlimited Credits New Version on your PC, you need to follow these steps:</p>
121
- <ol>
122
- other source you trust) and extract the zip file to a folder on your PC.</li>
123
- <li>Run the setup.exe file as administrator and follow the instructions to install the software on your PC.</li>
124
- <li>After the installation is completed, run the DC Unlocker 2 Client.exe file as administrator from the folder where you installed the software.</li>
125
- <li>Connect your device to your PC via USB cable. Make sure you have installed the drivers for your device on your PC. You can find the drivers here: https://www.dc-unlocker.com/downloads/drivers</li>
126
- <li>Click on the magnifying glass icon to detect your device. You should see a message saying "Found Applications port COMX" where X is a number.</li>
127
- <li>Select your device model from the drop-down menu or leave it as auto-detect.</li>
128
- <li>Click on the unlocking tab and choose either unlock or read unlock code depending on your device model.</li>
129
- <li>Wait for a few seconds or minutes until the process is completed. You should see a message saying "Unlocking, please wait ..." and then "Unlock done".</li>
130
- <li>Disconnect your device from your PC and insert a different SIM card.</li>
131
- <li>Enjoy your unlocked device!</li>
132
- </ol>
133
- <h3>The troubleshooting tips and FAQs for Cracked DC Unlocker Unlimited Credits New Version</h3>
134
- <p>If you encounter any problems or errors while using Cracked DC Unlocker Unlimited Credits New Version, you can try these tips:</p>
135
- <ul>
136
- <li>Make sure you have downloaded the latest version of Cracked DC Unlocker Unlimited Credits New Version from a reliable source.</li>
137
- <li>Make sure you have installed the software correctly and run it as administrator.</li>
138
- <li>Make sure you have installed the drivers for your device on your PC.</li>
139
- <li>Make sure you have a stable internet connection and disable any antivirus or firewall programs that may interfere with the software.</li>
140
- <li>Make sure you have selected the correct device model and mode from the drop-down menu.</li>
141
- <li>Make sure you have enough battery power on your device before starting the unlocking process.</li>
142
- <li>If you get an error message saying "Card not found", try to reinstall the software or use a different PC.</li>
143
- <li>If you get an error message saying "Server connection error", try to restart the software or use a different internet connection.</li>
144
- <li>If you get an error message saying "Unlock failed", try to repeat the process or use a different device model or mode.</li>
145
- </ul>
146
- <p>If you have any questions or doubts about Cracked DC Unlocker Unlimited Credits New Version, you can check these FAQs:</p>
147
- <ol>
148
- <li>Q: Is Cracked DC Unlocker Unlimited Credits New Version safe to use?</li>
149
- <li>A: No, it is not safe to use. It is illegal and risky. It may contain viruses or malware that can harm your PC or device. It may also damage your device or void your warranty. We do not recommend using it at all.</li>
150
- <li>Q: Is Cracked DC Unlocker Unlimited Credits New Version free to use?</li>
151
- <li>A: Yes, it is free to use. You do not need to pay for credits or subscriptions to use it. However, you may pay a higher price in terms of security and quality. You may also face legal consequences for using it.</li>
152
- <li>Q: Does Cracked DC Unlocker Unlimited Credits New Version work with all devices?</li>
153
- <li>A: No, it does not work with all devices. It may not support some models or versions that are supported by the official version. It may also not work properly or at all with some devices. You may end up bricking your device by using it.</li>
154
- <li>Q: Can I update Cracked DC Unlocker Unlimited Credits New Version?</li>
155
- <li>A: No, you cannot update Cracked DC Unlocker Unlimited Credits New Version. It does not have an online server that updates it regularly with new models and features. It may also stop working if you update it manually or automatically.</li>
156
- <li>Q: Can I get support for Cracked DC Unlocker Unlimited Credits New Version?</li>
157
- <li>A: No, you cannot get support for Cracked DC Unlocker Unlimited Credits New Version. It does not have a customer service that can help you with any issues or questions. You are on your own if you use it.</li>
158
- </ol>
159
- <h2>Conclusion</h2>
160
- <p>In conclusion, Cracked DC Unlocker Unlimited Credits New Version is a software that allows you to unlock various devices such as modems, routers, and phones without paying for credits or registering an account. It also gives you unlimited credits so you can unlock as many devices as you want. However, it is illegal and risky to use. It may contain viruses or malware that can harm your PC or device. It may also damage your device or void your warranty. It may not work with all devices or versions. It may not be updated or supported by anyone. We do not recommend using it at all. Instead, we suggest using the official version of DC Unlocker which is legal and safe. It is supported by the developers who update it regularly with new models and features. It also has a customer service that can help you with any issues or questions. You can buy credits from the website or from a reseller at a reasonable price. You can also enjoy other features such as repairing IMEI, firmware, bootloaders, etc. You can download and install DC Unlocker from the official website: https://www.dc-unlocker.com/</p>
161
- <p>We hope this article has been helpful and informative for you. If you have any feedback or suggestions, please let us know in the comments below. Thank you for reading!</p> 0a6ba089eb<br />
162
- <br />
163
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Fisiologiaanimalhill.md DELETED
@@ -1,35 +0,0 @@
1
- <br />
2
- <h1>Fisiologia Animal Hill: A Comprehensive Guide to Animal Physiology</h1>
3
- <p>Fisiologia Animal Hill is a popular textbook that covers the principles and concepts of animal physiology in a clear and engaging way. The book is written by Richard W. Hill, Gordon A. Wyse, and Margaret Anderson, who are experts in the field of comparative physiology. The book is suitable for undergraduate and graduate students who want to learn about the diversity and adaptations of animals in different environments.</p>
4
- <h2>fisiologiaanimalhill</h2><br /><p><b><b>Download</b> &harr; <a href="https://byltly.com/2uKywd">https://byltly.com/2uKywd</a></b></p><br /><br />
5
- <p>In this article, we will provide an overview of the main topics and features of Fisiologia Animal Hill, and explain why it is a valuable resource for anyone interested in animal physiology. We will also share some tips on how to use the book effectively for your studies.</p>
6
- <h2>What is Fisiologia Animal Hill?</h2>
7
- <p>Fisiologia Animal Hill is a comprehensive and updated textbook that covers the fundamentals of animal physiology, from molecules to organisms. The book is divided into seven parts, each focusing on a major aspect of animal physiology:</p>
8
- <ul>
9
- <li>Part 1: Introduction to Physiology. This part introduces the basic concepts and methods of physiology, such as homeostasis, feedback loops, adaptation, acclimation, and evolution.</li>
10
- <li>Part 2: Physiological Processes. This part covers the cellular and molecular mechanisms of physiological processes, such as membrane transport, signal transduction, metabolism, gene expression, and epigenetics.</li>
11
- <li>Part 3: Neural and Sensory Physiology. This part explores the structure and function of the nervous system, including neurons, synapses, neurotransmitters, sensory receptors, sensory pathways, and sensory modalities.</li>
12
- <li>Part 4: Endocrine Physiology. This part examines the role of hormones in regulating physiological functions, such as growth, development, reproduction, stress response, circadian rhythms, and behavior.</li>
13
- <li>Part 5: Muscle Physiology. This part describes the properties and types of muscle tissue, including skeletal muscle, cardiac muscle, and smooth muscle. It also explains how muscles contract and generate force, power, and movement.</li>
14
- <li>Part 6: Cardiovascular Physiology. This part analyzes the structure and function of the circulatory system, including blood, blood vessels, heart, cardiac cycle, blood pressure, blood flow, and gas exchange.</li>
15
- <li>Part 7: Respiratory Physiology. This part investigates the structure and function of the respiratory system, including lungs, airways, ventilation, diffusion, oxygen transport, carbon dioxide transport, and acid-base balance.</li>
16
- </ul>
17
- <p>Each part consists of several chapters that provide detailed explanations and examples of the physiological phenomena and principles. The book also includes numerous figures, tables,
18
- diagrams
19
-
20
- <h2>What are the features of Fisiologia Animal Hill?</h2>
21
- <p>Fisiologia Animal Hill is not only a comprehensive textbook, but also a user-friendly and interactive learning tool. The book has several features that enhance its readability and usability, such as:</p>
22
- <p></p>
23
- <ul>
24
- <li>Learning objectives. Each chapter begins with a list of learning objectives that outline the main concepts and skills that students should master after reading the chapter.</li>
25
- <li>Key terms. Each chapter highlights the key terms that are essential for understanding the topic. The key terms are also listed at the end of the chapter and defined in the glossary.</li>
26
- <li>Concept checks. Each chapter includes several concept checks that test students' comprehension and application of the material. The concept checks are designed to stimulate critical thinking and problem-solving skills.</li>
27
- <li>Examples and applications. Each chapter provides numerous examples and applications of animal physiology in different contexts, such as ecology, evolution, medicine, biotechnology, and human health. The examples and applications illustrate the relevance and importance of animal physiology in real-world situations.</li>
28
- <li>Experimental approaches. Each chapter introduces some of the experimental methods and techniques that are used to study animal physiology. The experimental approaches show how physiological knowledge is derived from scientific inquiry and evidence.</li>
29
- <li>Summary. Each chapter ends with a summary that reviews the main points and take-home messages of the chapter.</li>
30
- <li>Questions. Each chapter concludes with a set of questions that assess students' recall, understanding, analysis, synthesis, and evaluation of the material. The questions range from multiple-choice to short-answer to essay questions.</li>
31
- <li>Online resources. The book is accompanied by an online platform that offers additional resources for students and instructors, such as animations, videos, quizzes, flashcards, case studies, and instructor's manual.</li>
32
- </ul>
33
- <p>These features make Fisiologia Animal Hill a valuable and effective textbook for learning animal physiology.</p> 81aa517590<br />
34
- <br />
35
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1gistliPinn/ChatGPT4/Examples/Boeing 737-300 500 CBT - Lufthansa Full Versionl !!HOT!!.md DELETED
@@ -1,6 +0,0 @@
1
- <h2>Boeing 737-300 500 CBT - Lufthansa Full Versionl</h2><br /><p><b><b>Download</b> ->>->>->> <a href="https://imgfil.com/2uy1WE">https://imgfil.com/2uy1WE</a></b></p><br /><br />
2
-
3
- In a duo situation, you need to think like a complete rhythm section: comping instrument, ... Boeing 737-300 500 CBT - Lufthansa Full Versionl 1fdad05405<br />
4
- <br />
5
- <br />
6
- <p></p>
 
 
 
 
 
 
 
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/2020 Design 12 The Most Trusted Software for Kitchen and Bathroom Designers.md DELETED
@@ -1,186 +0,0 @@
1
-
2
- <h1>How to Download 2020 Design 12: The Best Kitchen and Bathroom Design Software</h1>
3
- <p>If you are a kitchen and bathroom designer, you know how important it is to have a reliable and powerful software that can help you create stunning designs for your clients. You need a software that can handle complex layouts, realistic renderings, and online catalogs of manufacturer products. You need a software that can make your design process faster, easier, and more enjoyable. You need <strong>2020 Design Live</strong>, the latest version of the most popular kitchen and bathroom design software in North America.</p>
4
- <h2>download 2020 design 12</h2><br /><p><b><b>DOWNLOAD</b> &#9734;&#9734;&#9734; <a href="https://urlin.us/2uT1QC">https://urlin.us/2uT1QC</a></b></p><br /><br />
5
- <p>In this article, we will show you how to download 2020 Design 12, the desktop solution of 2020 Design Live, and how to use it to create amazing designs that will impress your clients. We will also answer some of the most frequently asked questions about this software. Let's get started!</p>
6
- <h2>What is 2020 Design 12?</h2>
7
- <p>2020 Design 12 is the desktop solution of 2020 Design Live, the kitchen and bathroom design software that runs on both desktop and cloud platforms. It is designed for professional designers who want to have access to the largest selection of manufacturer catalogs, online configurable cabinets, appliances, and plumbing, advanced lighting wizard, SketchUp integration, and more. It is also equipped with all the tools that will help you create photorealistic renderings, 360° panoramas, and detailed floor plans.</p>
8
- <h3>Features and benefits of 2020 Design 12</h3>
9
- <p>Some of the features and benefits of using 2020 Design 12 are:</p>
10
- <ul>
11
- <li>It has a new <strong>64-bit architecture</strong> that allows you to handle large and complex projects with ease.</li>
12
- <li>It has a new <strong>EZ Render</strong> rendering engine that produces high-quality images in minutes.</li>
13
- <li>It has a new <strong>Cloud Configurator</strong> that lets you customize cabinets, appliances, and plumbing online without downloading catalogs.</li>
14
- <li>It has a new <strong>Shaker Cabinet Door</strong> option that adds a modern touch to your designs.</li>
15
- <li>It has a new <strong>Screen Layout</strong> feature that lets you configure your workspace according to your preferences.</li>
16
- <li>It has a new <strong>Annotation Tool</strong> that lets you precisely mark the positions of light fixtures in your designs.</li>
17
- <li>It has a new <strong>SketchUp Importer</strong> that lets you import SketchUp models directly into your designs.</li>
18
- <li>It has a new <strong>Cabinet Door Replacement</strong> feature that lets you change the colors and styles of cabinet doors without changing catalogs.</li>
19
- <li>It has a new <strong>Catalog Manager</strong> that lets you easily manage your catalogs and updates.</li>
20
- <li>It has a new <strong>Pricing Tool</strong> that lets you generate accurate quotes for your clients based on manufacturer prices.</li>
21
- <li>It has a new <strong>Manager Starter Edition</strong>, a business process management application that helps you organize your projects, clients, and orders.</li>
22
- <li>It has a new <strong>User Interface</strong>, with improved icons, menus, toolbars, and dialogs.</li>
23
- <li>It has an improved <strong>User Experience</strong>, with enhanced performance, stability, and usability.</li>
24
- <li>It has an improved <strong>User Support</strong>, with online training, video tips, knowledge center, blogs, webinars, and more.</li>
25
- </ul>
26
- <h3>Requirements and compatibility of 2020 Design 12</h3>
27
- <p>To use 2020 Design 12, you need to have the following system requirements and compatibility:</p>
28
- <p>download 2020 design live software<br />
29
- download 2020 design v12 for kitchen and bathroom<br />
30
- download 2020 design v12 with new rendering engine<br />
31
- download 2020 design v12 with shaker cabinet doors<br />
32
- download 2020 design v12 with sketchup integration<br />
33
- download 2020 design v12 with advanced lighting wizard<br />
34
- download 2020 design v12 with cloud configurable catalogs<br />
35
- download 2020 design v12 with manufacturer products<br />
36
- download 2020 design v12 with 360 panoramas<br />
37
- download 2020 design v12 with space planning tools<br />
38
- download 2020 design v12 with decorative cloud items<br />
39
- download 2020 design v12 with personalization features<br />
40
- download 2020 design v12 with support and updates<br />
41
- download 2020 design v12 with free trial option<br />
42
- download 2020 design v12 with pricing information<br />
43
- download 2020 design v12 with testimonials and reviews<br />
44
- download 2020 design v12 with training resources<br />
45
- download 2020 design v12 with video tips and tutorials<br />
46
- download 2020 design v12 with webinar recordings<br />
47
- download 2020 design v12 with knowledge center access<br />
48
- download 2020 design v12 with blogs and news updates<br />
49
- download 2020 design v12 with edition comparison chart<br />
50
- download 2020 design live foundation edition<br />
51
- download 2020 design live essentials edition<br />
52
- download 2020 design live premium edition<br />
53
- how to download 2020 design v12 on windows pc<br />
54
- how to download 2020 design v12 on mac os<br />
55
- how to download 2020 design v12 on multiple devices<br />
56
- how to download 2020 design v12 offline installer<br />
57
- how to download 2020 design v12 latest version<br />
58
- how to install and activate 2020 design v12 software<br />
59
- how to update and upgrade to 2020 design v12 software<br />
60
- how to uninstall and remove 2020 design v12 software<br />
61
- how to troubleshoot and fix issues with 2020 design v12 software<br />
62
- how to contact customer service for 2020 design v12 software<br />
63
- how to use and learn from 2020 design v12 software<br />
64
- how to create and share designs with 2020 design v12 software<br />
65
- how to import and export files with 2020 design v12 software<br />
66
- how to customize and optimize settings with 2020 design v12 software<br />
67
- how to access and manage catalogs with 2020 design v12 software<br />
68
- benefits and features of downloading 2020 design v12 software<br />
69
- pros and cons of downloading 2020 design v12 software<br />
70
- alternatives and competitors of downloading 2020 design v12 software<br />
71
- discounts and coupons for downloading 2020 design v12 software<br />
72
- requirements and specifications for downloading 2020 design v12 software</p>
73
- <table>
74
- <tr>
75
- <th>Operating System</th>
76
- <th>Windows 10 (64-bit)</th>
77
- </tr>
78
- <tr>
79
- <td>Processor</td>
80
- <td>Intel Core i5 or higher</td>
81
- </tr>
82
- <tr>
83
- <td>Memory</td>
84
- <td>8 GB RAM or higher</td>
85
- </tr>
86
- <tr>
87
- <td>Hard Disk Space</td>
88
- <td>10 GB or higher</td>
89
- </tr>
90
- <tr>
91
- <td>Graphics Card</td>
92
- <td>NVIDIA GeForce GTX 1050 or higher</td>
93
- </tr>
94
- <tr>
95
- <td>Internet Connection</td>
96
- <td>High-speed broadband connection</td>
97
- </tr>
98
- <tr>
99
- <td>Screen Resolution</td>
100
- <td>1920 x 1080 or higher</td>
101
- </tr>
102
- <tr>
103
- <td>Mouse</td>
104
- <td>3-button mouse with scroll wheel</td>
105
- </tr>
106
- <tr>
107
- <td>Keyboard</td>
108
- <td>Standard keyboard with numeric keypad</td>
109
- </tr>
110
- <tr>
111
- <td>Printer</td>
112
- <td>Color printer (optional)</td>
113
- </tr>
114
- <h2>How to download and install 2020 Design 12</h2>
115
- <p>To download and install 2020 Design 12, you need to have a valid license and an active subscription. You also need to have an account on the 2020 website. If you don't have one, you can create one for free. Here are the steps to download and install 2020 Design 12:</p>
116
- <h3>Steps to download 2020 Design 12</h3>
117
- <ol>
118
- <li>Go to the <a href="">2020 website</a> and log in with your username and password.</li>
119
- <li>Click on the <strong>Downloads</strong> tab and select <strong>2020 Design Live Desktop Solution (2020 Design 12)</strong>.</li>
120
- <li>Select the language of your choice and click on the <strong>Download Now</strong> button.</li>
121
- <li>A pop-up window will appear asking you to save the file. Choose a location on your computer where you want to save the file and click on the <strong>Save File</strong> button.</li>
122
- <li>The file will start downloading. It may take some time depending on your internet speed. You can check the progress of the download on your browser.</li>
123
- <li>Once the download is complete, you will see a message saying that the file is ready to be opened. Click on the <strong>Open File</strong> button.</li>
124
- <li>A security warning may appear asking you if you want to run the file. Click on the <strong>Run Anyway</strong> button.</li>
125
- <li>The 2020 Design 12 installer will launch. Follow the instructions on the screen to complete the installation.</li>
126
- <li>You may need to restart your computer after the installation is finished.</li>
127
- <li>You can now launch 2020 Design 12 from your desktop or start menu.</li> <h3>Steps to install 2020 Design 12</h3>
128
- <p>To install 2020 Design 12, you need to have a valid license key and an active subscription. You also need to have an internet connection to activate the software. Here are the steps to install 2020 Design 12:</p>
129
- <ol>
130
- <li>After downloading the file, double-click on it to launch the installer.</li>
131
- <li>A welcome screen will appear. Click on the <strong>Next</strong> button.</li>
132
- <li>A license agreement screen will appear. Read the terms and conditions and check the box to accept them. Click on the <strong>Next</strong> button.</li>
133
- <li>A destination folder screen will appear. Choose a location on your computer where you want to install the software. You can use the default location or browse for a different one. Click on the <strong>Next</strong> button.</li>
134
- <li>A start menu folder screen will appear. Choose a name for the folder where you want to create shortcuts for the software. You can use the default name or type a different one. Click on the <strong>Next</strong> button.</li>
135
- <li>A ready to install screen will appear. Review your choices and click on the <strong>Install</strong> button.</li>
136
- <li>The installation will begin. It may take some time depending on your computer speed. You can check the progress of the installation on the screen.</li>
137
- <li>Once the installation is complete, you will see a message saying that 2020 Design 12 has been successfully installed. Click on the <strong>Finish</strong> button.</li>
138
- <li>The software will launch automatically. You will see a login screen where you need to enter your username and password that you used to create your account on the 2020 website. Click on the <strong>Login</strong> button.</li>
139
- <li>You will see an activation screen where you need to enter your license key that you received when you purchased the software. Click on the <strong>Activate</strong> button.</li>
140
- <li>You will see a confirmation screen saying that your software has been activated. Click on the <strong>OK</strong> button.</li>
141
- <li>You can now start using 2020 Design 12 to create your kitchen and bathroom designs.</li> <h2>How to use 2020 Design 12</h2>
142
- <p>Now that you have downloaded and installed 2020 Design 12, you are ready to use it to create your kitchen and bathroom designs. 2020 Design 12 is a user-friendly and intuitive software that will guide you through the design process step by step. Here are some tips and tricks for using 2020 Design 12:</p>
143
- <h3>Tips and tricks for using 2020 Design 12</h3>
144
- <ul>
145
- <li>Use the <strong>Quick Start Wizard</strong> to create a new design based on a template or a previous project. You can choose from different styles, layouts, and dimensions.</li>
146
- <li>Use the <strong>Design Tab</strong> to access the main tools and features of the software. You can draw walls, doors, windows, cabinets, appliances, plumbing, lighting, accessories, and more. You can also modify the properties, dimensions, colors, and styles of the items.</li>
147
- <li>Use the <strong>Catalog Tab</strong> to browse and select from thousands of manufacturer products. You can also use the <strong>Cloud Configurator</strong> to customize the products online without downloading catalogs.</li>
148
- <li>Use the <strong>Render Tab</strong> to generate photorealistic renderings of your designs. You can choose from different modes, such as <strong>EZ Render</strong>, <strong>Raytrace</strong>, or <strong>Panorama</strong>. You can also adjust the lighting, shadows, reflections, and textures of your renderings.</li>
149
- <li>Use the <strong>Presentation Tab</strong> to create detailed floor plans, elevations, perspectives, and reports of your designs. You can also export your designs to PDF, JPG, DWG, or SketchUp formats.</li>
150
- <li>Use the <strong>Pricing Tab</strong> to generate accurate quotes for your clients based on manufacturer prices. You can also apply discounts, taxes, and markups to your quotes.</li>
151
- <li>Use the <strong>Help Tab</strong> to access online training, video tips, knowledge center, blogs, webinars, and more. You can also contact the 2020 support team if you have any questions or issues with the software.</li>
152
- </ul>
153
- <h3>Examples of designs created with 2020 Design 12</h3>
154
- <p>To inspire you and show you what you can do with 2020 Design 12, here are some examples of kitchen and bathroom designs created with this software:</p>
155
- <table>
156
- <tr>
157
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Kitchen-1.jpg" alt="Kitchen design with white cabinets and blue backsplash"></td>
158
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Kitchen-2.jpg" alt="Kitchen design with dark wood cabinets and marble countertop"></td>
159
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Kitchen-3.jpg" alt="Kitchen design with gray cabinets and yellow accents"></td>
160
- </tr>
161
- <tr>
162
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Bathroom-1.jpg" alt="Bathroom design with white vanity and blue tiles"></td>
163
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Bathroom-2.jpg" alt="Bathroom design with dark wood vanity and stone wall"></td>
164
- <td><img src="https://www.2020spaces.com/wp-content/uploads/2019/10/2020Design-V11-9-Bathroom-3.jpg" alt="Bathroom design with gray vanity and green plants"></td>
165
- </tr>
166
- </table>
167
- <h2>Conclusion</h2>
168
- <p>In conclusion, 2020 Design 12 is the best kitchen and bathroom design software that you can use to create stunning designs for your clients. It has a new 64-bit architecture, a new EZ Render rendering engine, a new Cloud Configurator, a new Shaker Cabinet Door option, a new Screen Layout feature, a new Annotation Tool, a new SketchUp Importer, a new Cabinet Door Replacement feature, a new Catalog Manager, a new Pricing Tool, a new Manager Starter Edition, a new User Interface, an improved User Experience, and an improved User Support. It also has access to the largest selection of manufacturer catalogs online.</p>
169
- <p>To download 2020 Design 12, you need to have a valid license and an active subscription. You also need to have an account on the 2020 website. You can follow the steps that we have explained in this article to download and install the software. You can also use the tips and tricks that we have shared to use the software effectively. You can also check out the examples of designs that we have shown to inspire you and see what you can do with 2020 Design 12.</p>
170
- <p>We hope that this article has helped you learn how to download 2020 Design 12 and how to use it to create amazing kitchen and bathroom designs. If you have any questions or feedback, please feel free to contact us or leave a comment below. We would love to hear from you!</p>
171
- <h2>FAQs</h2>
172
- <p>Here are some of the most frequently asked questions about 2020 Design 12:</p>
173
- <ol>
174
- <li><strong>How much does 2020 Design 12 cost?</strong></li>
175
- <p>2020 Design 12 is available as a subscription-based software. The price depends on the type and duration of the subscription that you choose. You can visit the <a href="">2020 website</a> to see the different subscription options and prices.</p>
176
- <li><strong>Can I use 2020 Design 12 on a Mac?</strong></li>
177
- <p>2020 Design 12 is compatible with Windows 10 (64-bit) operating system only. If you want to use it on a Mac, you need to install a Windows emulator, such as Parallels Desktop or Boot Camp, on your Mac.</p>
178
- <li><strong>Can I use 2020 Design 12 offline?</strong></li>
179
- <p>2020 Design 12 is a desktop solution that can be used offline. However, you need to have an internet connection to activate the software, download catalogs, use the Cloud Configurator, and access online support.</p>
180
- <li><strong>Can I import and export files from other software into 2020 Design 12?</strong></li>
181
- <p>Yes, you can import and export files from other software into 2020 Design 12. You can import files in DWG, DXF, SKP, JPG, PNG, BMP, TIF, GIF, PDF, and CSV formats. You can export files in DWG, DXF, SKP, JPG, PNG, BMP, TIF, GIF, PDF, CSV, XML, and HTML formats.</p>
182
- <li><strong>Can I share my designs with my clients using 2020 Design 12?</strong></li>
183
- <p>Yes, you can share your designs with your clients using 2020 Design 12. You can send them renderings, panoramas, floor plans, elevations, perspectives, and reports via email or social media. You can also use the <a href="">2020 Cloud Viewer</a>, a free online tool that lets you share your designs in an interactive way.</p>
184
- </ol></p> 197e85843d<br />
185
- <br />
186
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/APKPure 3 The Android App Store that Saves You Time Space and Data.md DELETED
@@ -1,124 +0,0 @@
1
-
2
- <h1>APKPure 3: A Comprehensive Guide</h1>
3
- <p>If you are an Android user, you might be familiar with Google Play Store, the official app store for Android devices. But did you know that there are other app stores that offer different apps and games that you might not find on Google Play? One of them is APKPure, a popular alternative app store that has been around since 2014.</p>
4
- <p>APKPure is a website and an app that allows you to download and install Android apps and games from various sources. You can find apps that are not available in your region, apps that are discontinued or removed from Google Play, apps that have faster updates or older versions, and more. You can also discover new and upcoming apps and games, follow your favorite ones, and join a community of Android enthusiasts.</p>
5
- <h2>apkpure 3</h2><br /><p><b><b>DOWNLOAD</b> - <a href="https://urlin.us/2uSYa3">https://urlin.us/2uSYa3</a></b></p><br /><br />
6
- <p>However, using APKPure also comes with some risks and challenges. Since APKPure is not an official app store, it does not have the same security and quality standards as Google Play. You might encounter apps that are infected with malware or adware, apps that are illegal or infringe copyrights, apps that are outdated or incompatible with your device, and more. You also need to enable unknown sources on your device settings to install apps from APKPure, which can expose you to potential threats.</p>
7
- <p>In this article, we will give you a comprehensive guide on APKPure 3, the latest version of the app store. We will cover its features, benefits, drawbacks, alternatives, and more. We will also provide some tips and recommendations on how to use APKPure safely and effectively.</p>
8
- <h2>Features of APKPure 3</h2>
9
- <p>APKPure 3 is the latest version of the app store that was released in September 2020. It has some new features and improvements that make it more user-friendly and convenient. Here are some of the features of APKPure 3:</p>
10
- <ul>
11
- <li><strong>No region locking</strong>: APKPure offers a selection of the best Android apps and games that you can not find in Google Play due to regional restrictions. You can access apps and games from different countries and regions without any limitations.</li>
12
- <li><strong>Safe downloading</strong>: All apps in APKPure are verified by MD5 hash to ensure their integrity and authenticity. You can also check the digital signature of each app to make sure it matches the original one. APKPure also scans all apps for viruses and malware before uploading them to the app store.</li>
13
- <li><strong>Find any app you need</strong>: APKPure has a powerful search engine that allows you to find any app or game you want by keywords, categories, tags, ratings, reviews, etc. You can also browse the app store by popular, trending, new, or recommended apps and games.</li>
14
- <li><strong>Pause and resume downloads</strong>: You can pause and resume your downloads at any time without losing your progress. This is useful if you have a slow or unstable internet connection or if you want to save your data usage.</li>
15
- </ul>
16
- <h2>Benefits of APKPure 3</h2>
17
- <p>APKPure 3 has many benefits for Android users who want to explore more apps and games beyond Google Play. Here are some of the benefits of using APKPure 3:</p>
18
- <p>apkpure 3 download<br />
19
- apkpure 3 apk<br />
20
- apkpure 3 app store<br />
21
- apkpure 3 update<br />
22
- apkpure 3 install<br />
23
- apkpure 3 mod apk<br />
24
- apkpure 3 for pc<br />
25
- apkpure 3 for android<br />
26
- apkpure 3 latest version<br />
27
- apkpure 3 free download<br />
28
- apkpure 3 pro apk<br />
29
- apkpure 3 old version<br />
30
- apkpure 3 online<br />
31
- apkpure 3 games<br />
32
- apkpure 3 app download<br />
33
- apkpure 3 premium apk<br />
34
- apkpure 3 downloader<br />
35
- apkpure 3 for ios<br />
36
- apkpure 3 beta<br />
37
- apkpure 3 review<br />
38
- apkpure 3 alternative<br />
39
- apkpure 3 modded games<br />
40
- apkpure 3 cracked apps<br />
41
- apkpure 3 for firestick<br />
42
- apkpure 3 lite apk<br />
43
- apkpure 3 region free apk<br />
44
- apkpure 3 safe<br />
45
- apkpure 3 pubg mobile<br />
46
- apkpure 3 fortnite<br />
47
- apkpure 3 minecraft<br />
48
- apkpure 3 gta san andreas<br />
49
- apkpure 3 roblox<br />
50
- apkpure 3 among us<br />
51
- apkpure 3 call of duty mobile<br />
52
- apkpure 3 clash of clans<br />
53
- apkpure 3 pokemon go<br />
54
- apkpure 3 brawl stars<br />
55
- apkpure 3 free fire<br />
56
- apkpure 3 subway surfers<br />
57
- apkpure 3 candy crush saga<br />
58
- apkpure 3 zoom cloud meetings<br />
59
- apkpure 3 tiktok<br />
60
- apkpure 3 instagram<br />
61
- apkpure 3 whatsapp messenger<br />
62
- apkpure 3 facebook lite<br />
63
- apkpure 3 youtube vanced<br />
64
- apkpure 3 netflix mod apk</p>
65
- <ul> <li><strong>Access to a wide variety of apps and games</strong>: APKPure has a huge collection of apps and games that you can explore and download. You can find apps and games that are not available on Google Play, such as modded, hacked, or patched versions. You can also find apps and games that are exclusive to certain regions or countries, such as China, Japan, Korea, etc.</li>
66
- <li><strong>Ability to download older or discontinued versions</strong>: APKPure keeps a history of all the versions of the apps and games that it hosts. You can download and install any version you want, even if it is no longer supported or updated by the developer. This is useful if you want to use an app or game that has a feature that was removed or changed in the newer version, or if you have a device that is not compatible with the latest version.</li>
67
- <li><strong>Faster updates and releases</strong>: APKPure often gets the latest updates and releases of the apps and games before they are available on Google Play. This means you can enjoy the newest features and improvements sooner than other users. You can also enable auto-update for your favorite apps and games, so you don't have to manually check for updates.</li>
68
- <li><strong>Open-source nature and customization</strong>: APKPure is an open-source app store that allows you to customize it according to your preferences. You can change the theme, language, font size, etc. You can also create your own app store and share it with other users. You can also contribute to the development and improvement of APKPure by reporting bugs, suggesting features, or translating the app.</li>
69
- </ul>
70
- <h2>Drawbacks of APKPure 3</h2>
71
- <p>APKPure 3 is not without its drawbacks. As an unofficial app store, it has some risks and challenges that you should be aware of before using it. Here are some of the drawbacks of using APKPure 3:</p>
72
- <ul>
73
- <li><strong>Security and legal issues</strong>: APKPure does not have the same security and quality standards as Google Play. It does not verify the identity or legitimacy of the app developers or publishers. It also does not have a clear policy or mechanism for dealing with complaints or disputes. This means you might encounter apps that are illegal, infringing, fraudulent, deceptive, harmful, or malicious. You might also violate the terms and conditions of some apps or games by downloading them from APKPure.</li>
74
- <li><strong>Potential malware and adware infections</strong>: APKPure does scan all the apps for viruses and malware before uploading them to the app store, but it cannot guarantee that they are 100% safe and clean. Some apps might contain hidden malware or adware that can compromise your device's security and performance. Some apps might also display annoying or intrusive ads that can interfere with your user experience.</li>
75
- <li><strong>Outdated or incompatible apps</strong>: APKPure does not always have the latest or most compatible version of the apps and games. Some apps might be outdated or discontinued by the developer or publisher. Some apps might not work properly on your device due to hardware or software limitations. Some apps might also conflict with other apps or system settings on your device.</li>
76
- </ul>
77
- <h2>Alternatives to APKPure 3</h2>
78
- <p>If you are looking for other app stores that offer similar or better features than APKPure 3, you have plenty of options to choose from. Here are some of the best alternatives to APKPure 3:</p>
79
- <table>
80
- <tr>
81
- <th>Name</th>
82
- <th>Description</th>
83
- <th>Pros</th>
84
- <th>Cons</th>
85
- </tr>
86
- <tr>
87
- <td>APKMirror</td>
88
- <td>A website that hosts free Android apps and games from various sources.</td>
89
- <td>- Safe downloading<br>- Ability to get old versions<br>- No account needed<br></td>
90
- <td>- No native Android app<br>- Accepts very few new APKs<br>- No auto-update feature<br></td>
91
- </tr>
92
- <tr>
93
- <td>F-Droid</td>
94
- <td>An app store that only offers free and open source Android apps and games.</td>
95
- <td>- Privacy focused<br>- Ad-free<br>- No registration required<br>- Crowdsourced<br>- No tracking<br></td>
96
- <td>- Limited selection<br>- No modded or patched apps<br>- Slow updates<br></td>
97
- </tr>
98
- <tr>
99
- <td>Aptoide</td>
100
- <td>An app store that allows users to create and manage their own app stores.</td>
101
- <td>- Free<br>- Open source version available<br>- Large user base<br>- Customizable<br></td>
102
- <td>- Illegal apps<br>- Not all apps are safe<br>- Most apps are outdated<br></td>
103
- </tr>
104
- <tr>
105
- <td>Aurora Store</td>
106
- <td>An app store that allows users to download apps from Google Play anonymously.</td>
107
- <td>- Privacy focused<br>- Ad-free - No region locking<br>- No account needed<br></td>
108
- <td>- Not all apps are available<br>- Some apps might not work properly<br>- No auto-update feature<br></td>
109
- </tr>
110
- </table>
111
- <h2>Conclusion</h2>
112
- <p>APKPure 3 is a great app store for Android users who want to explore more apps and games beyond Google Play. It offers a lot of features, benefits, and options that can enhance your Android experience. However, it also has some drawbacks and risks that you should be careful of before using it. You should always check the source, signature, and permission of the apps you download from APKPure, and use a reliable antivirus or security app to protect your device. You should also respect the rights and policies of the app developers and publishers, and avoid downloading or using illegal or infringing apps.</p>
113
- <p>If you are looking for other app stores that offer similar or better features than APKPure 3, you can try APKMirror, F-Droid, Aptoide, or Aurora Store. They are some of the best alternatives to APKPure 3 that you can find online. You can compare their pros and cons and choose the one that suits your needs and preferences.</p>
114
- <p>We hope this article has given you a comprehensive guide on APKPure 3 and its alternatives. If you have any questions or feedback, please feel free to leave a comment below. Thank you for reading!</p>
115
- <h2>FAQs</h2>
116
- <ul>
117
- <li><strong>Is APKPure safe to use?</strong><br>APKPure is generally safe to use, as it scans all the apps for viruses and malware before uploading them to the app store. However, it cannot guarantee that all the apps are 100% safe and clean, as some apps might contain hidden malware or adware that can harm your device. You should always check the source, signature, and permission of the apps you download from APKPure, and use a reliable antivirus or security app to protect your device.</li>
118
- <li><strong>Is APKPure legal?</strong><br>APKPure is not illegal in itself, as it is just a platform that hosts and distributes Android apps and games from various sources. However, some of the apps and games that you can find on APKPure might be illegal or infringing, as they might violate the terms and conditions of the original developers or publishers, or the laws of your country or region. You should always respect the rights and policies of the app developers and publishers, and avoid downloading or using illegal or infringing apps.</li>
119
- <li><strong>How to download and install APKPure on Android devices?</strong><br>To download and install APKPure on your Android device, you need to follow these steps:<br>- Go to the official website of APKPure (https://apkpure.com/) and click on the download button.<br>- Once the APK file is downloaded, open it and tap on install.<br>- If you see a warning message that says "For your security, your phone is not allowed to install unknown apps from this source", go to your device settings and enable unknown sources.<br>- Once the installation is complete, open the app and enjoy!</li>
120
- <li><strong>How to update apps using APKPure?</strong><br>To update apps using APKPure, you need to follow these steps:<br>- Open the app and tap on the menu icon on the top left corner.<br>- Tap on "My Games & Apps" and then tap on "Updates".<br>- You will see a list of apps that have new updates available.<br>- Tap on the update button next to each app you want to update.<br>- Wait for the download and installation to finish.<br>- Alternatively, you can enable auto-update for your favorite apps by tapping on the menu icon on the top right corner of each app's page.</li>
121
- <li><strong>How to uninstall APKPure?</strong><br>To uninstall APKPure from your Android device, you need to follow these steps:<br>- Go to your device settings and tap on "Apps" or "Applications".<br>- Find and tap on "APKPure".<br>- Tap on "Uninstall" and confirm.<br>- You can also uninstall APKPure by long-pressing its icon on your home screen or app drawer and dragging it to the trash bin.</li>
122
- </ul></p> 197e85843d<br />
123
- <br />
124
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Best Ways to Download YouTube Videos Reddit Users Recommend.md DELETED
@@ -1,183 +0,0 @@
1
- <br />
2
- <h1>Best Way to Download YouTube Videos Reddit</h1>
3
- <p>Do you want to download YouTube videos that are posted on Reddit? Maybe you want to watch them offline, share them with your friends, or edit them for your own purposes. Whatever the reason, downloading YouTube videos from Reddit is not as hard as you might think. In this article, we will show you the best tools to download YouTube videos from Reddit, whether you are using a web browser, a mobile device, or a desktop app.</p>
4
- <h2>Introduction</h2>
5
- <h3>Why download YouTube videos from Reddit?</h3>
6
- <p>Reddit is one of the most popular social media platforms in the world, with millions of users sharing and discussing all kinds of topics. One of the most common types of content on Reddit is YouTube videos, which can be found in various subreddits, such as r/videos, r/funny, r/educationalvideos, and many more.</p>
7
- <h2>best way to download youtube videos reddit</h2><br /><p><b><b>DOWNLOAD</b> &#9675;&#9675;&#9675; <a href="https://urlin.us/2uSYnv">https://urlin.us/2uSYnv</a></b></p><br /><br />
8
- <p>Downloading YouTube videos from Reddit can have many benefits, such as:</p>
9
- <ul>
10
- <li>You can watch them offline without an internet connection or ads.</li>
11
- <li>You can save them on your device or cloud storage for future reference.</li>
12
- <li>You can share them with your friends or family via other apps or platforms.</li>
13
- <li>You can edit them for your own projects or purposes.</li>
14
- </ul>
15
- <h3>What are the best tools to download YouTube videos from Reddit?</h3>
16
- <p>There are many tools that claim to download YouTube videos from Reddit, but not all of them are reliable, safe, or easy to use. Some of them may not work properly, contain malware, or have annoying pop-ups. To help you avoid these problems, we have selected the best tools to download YouTube videos from Reddit, based on their features, performance, and user reviews. We have divided them into three categories: web-based video downloaders, mobile apps, and desktop apps.</p>
17
- <h2>Web-based video downloaders</h2>
18
- <h3>RedditSave</h3>
19
- <p>RedditSave is a free website that lets you download videos from any device. And unlike some downloader sites, it saves videos with the audio included. It works with YouTube and many other video platforms that are posted on Reddit.</p>
20
- <h4>How to use RedditSave</h4>
21
- <ol>
22
- <li>Go to [Reddit] and find the post that contains the YouTube video you want to download.</li>
23
- <li>Copy the URL of the post by right-clicking on it and selecting "Copy link address".</li>
24
- <li>Go to [RedditSave] and paste the URL in the search box.</li>
25
- <li>Click on "Download" and choose the quality and format you want.</li>
26
- <li>Click on "Download" again and save the video file on your device.</li>
27
- </ol>
28
- <h4>Pros and cons of RedditSave</h4>
29
- <ul>
30
- <li><b>Pros:</b></li>
31
- <ul>
32
- <li>It is free and easy to use.</li>
33
- <li>It supports many video platforms besides YouTube.</li>
34
- <li>It downloads videos with sound.</li>
35
- <li>It offers different quality and format options.</li>
36
- </ul>
37
- <li><b>Cons:</b></li>
38
- <ul>
39
- <li>It may not work with some private or deleted posts.</li>
40
- <li>It may have some ads or pop-ups.</li>
41
- </ul>
42
- </ul>
43
- <h3>Viddit.red</h3>
44
- <p>Viddit.red is another free website that allows you to download YouTube videos from Reddit with a simple interface. It also supports other video platforms, such as Vimeo, Dailymotion, Twitch, and more. It downloads videos with sound and offers different quality options.</p>
45
- <h4>How to use Viddit.red</h4>
46
- <ol>
47
- <li>Go to [Reddit] and find the post that contains the YouTube video you want to download.</li>
48
- <li>Copy the URL of the post by right-clicking on it and selecting "Copy link address".</li>
49
- <li>Go to [Viddit.red] and paste the URL in the search box.</li>
50
- <li>Click on "Download" and choose the quality you want.</li>
51
- <li>Click on "Download" again and save the video file on your device.</li>
52
- </ol>
53
- <h4>Pros and cons of Viddit.red</h4>
54
- <ul>
55
- <li><b>Pros:</b></li>
56
- <ul>
57
- <li>It is free and simple to use.</li>
58
- <li>It supports many video platforms besides YouTube.</li>
59
- <li>It downloads videos with sound.</li>
60
- <li>It offers different quality options.</li>
61
- </ul>
62
- <li><b>Cons:</b></li>
63
- <ul>
64
- <li>It may not work with some private or deleted posts.</li>
65
- <li>It may have some ads or pop-ups.</li>
66
- </ul>
67
- </ul>
68
- <h2>Mobile apps</h2>
69
- <h3>Slide for Reddit</h3>
70
- <p>If you are using an Android device, you can download YouTube videos from Reddit using Slide for Reddit, a free and open-source app that lets you browse Reddit in a smooth and customizable way. It has a built-in video downloader that works with YouTube and other video platforms. It also has many other features, such as offline mode, night mode, multi-account support, and more.</p>
71
- <h4>How to use Slide for Reddit</h4>
72
- <ol>
73
- <li>Download and install Slide for Reddit from [Google Play Store].</li>
74
- <li>Open the app and log in to your Reddit account or browse as a guest.</li>
75
- <li>Find the post that contains the YouTube video you want to download.</li>
76
- <li>Tap on the three-dot menu icon at the top right corner of the post and select "Download content".</li>
77
- <li>Select the quality and format you want and tap on "Download".</li>
78
- <li>The video file will be saved in your device's gallery or file manager.</li>
79
- </ol>
80
- <h4>Pros and cons of Slide for Reddit</h4>
81
- <ul>
82
- <li><b>Pros:</b></li>
83
- <ul>
84
- <li>It is free and open-source.</li>
85
- <li>It has a built-in video downloader that supports many video platforms.</li>
86
- <li>It has a smooth and customizable user interface.</li>
87
- <li>It has many other features that enhance your Reddit experience.</li>
88
- </ul>
89
- <li><b>Cons:</b></li>
90
- <ul>
91
- <li>It is only available for Android devices.</li>
92
- <li>It may not work with some private or deleted posts.</li>
93
- </ul>
94
- </ul>
95
- <h3>SaveVideo bot</h3>
96
- <p>If you are using an iOS device, you can download YouTube videos from Reddit using SaveVideo bot, a free Telegram bot that lets you download videos from any website. It works with YouTube and other video platforms that are posted on Reddit. It downloads videos with sound and offers different quality options.</p>
97
- <h4>How to use SaveVideo bot</h4>
98
- <ol>
99
- <li>Download and install Telegram from [App Store].</li>
100
- <li>Open the app and create an account or log in to your existing account.</li>
101
- <li>Go to [Reddit] and find the post that contains the YouTube video you want to download.</li>
102
- <li>Copy the URL of the post by tapping on it and selecting "Share" then "Copy".</li>
103
- <li>Go to Telegram and search for [@SaveVideoBot] or click on this [link].</li>
104
- <li>Paste the URL in the chat box and send it to the bot.</li>
105
- <li>The bot will reply with a list of quality options. Tap on the one you want.</li>
106
- <li>The bot will send you the video file. Tap on it and select "Save to Camera Roll".</li>
107
- </ol>
108
- <h4>Pros and cons of SaveVideo bot</h4>
109
- <ul>
110
- <li><b>Pros:</b></li>
111
- <ul>
112
- <li>It is free and easy to use.</li>
113
- <li>It supports many video platforms besides YouTube.</li>
114
- <li>It downloads videos with sound.</li>
115
- <li>It offers different quality options.</li>
116
- </ul>
117
- <li><b>Cons:</b></li>
118
- <ul>
119
- <li>It requires Telegram app and account.</li>
120
- <li>It may not work with some private or deleted posts.</li>
121
- <li>It may have some ads or pop-ups.</li>
122
- </ul>
123
- </ul>
124
- <h2>Conclusion</h2>
125
- <h3>Summary of the main points</h3>
126
- <p>In this article, we have shown you the best way to download YouTube videos from Reddit, using different tools for different devices. We have compared the pros and cons of each tool, and explained how to use them step by step. Whether you want to use a web-based video downloader, a mobile app, or a desktop app, you can find the best option for your needs and preferences.</p>
127
- <h3>Call to action</h3>
128
- <p>Now that you know how to download YouTube videos from Reddit, why not give it a try and see for yourself how easy and convenient it is? You can enjoy watching your favorite videos offline, share them with your friends, or edit them for your own purposes. Just remember to respect the rights of the original creators and follow the terms of service of each platform. Happy downloading!</p>
129
- <p>How to download youtube videos with sound from reddit<br />
130
- Reddit video downloader online free<br />
131
- Best software for downloading youtube videos from reddit<br />
132
- RedditSave: Download reddit videos with audio<br />
133
- Stacher: A customizable GUI for YT-DLP<br />
134
- yt-dlp: A command-line tool for downloading youtube videos<br />
135
- Slide for Reddit: A mobile app that can download reddit videos<br />
136
- /u/SaveVideo bot: A reddit bot that can download videos from any subreddit<br />
137
- Download youtube videos in 1080p from reddit<br />
138
- How to use FFMPEG to merge video and audio from youtube downloads<br />
139
- Jdownloader2: A desktop app that can download youtube videos<br />
140
- How to avoid YT throttling when downloading youtube videos<br />
141
- How to download your own youtube videos from reddit<br />
142
- YouTube Premium: A paid service that allows offline viewing of youtube videos<br />
143
- How to insert "pp" after "youtube" to download videos<br />
144
- How to setup youtube-dl-gui for downloading youtube videos<br />
145
- How to use the command line to download youtube videos with yt-dlp<br />
146
- How to download a portion of a youtube video from reddit<br />
147
- How to automatically rename the output files of youtube downloads<br />
148
- How to download videos copied to your clipboard with Stacher<br />
149
- How to use multi-threading to download multiple youtube videos simultaneously<br />
150
- How to download playlists from youtube using yt-dlp or Stacher<br />
151
- How to use the Something Not Working tab in Stacher to troubleshoot issues<br />
152
- How to choose the best video and audio quality for youtube downloads<br />
153
- How to use the extra options in Stacher for more customization<br />
154
- How to install yt-dlp and yt-dlg on Windows, Mac, or Linux<br />
155
- How to use the -x option in yt-dlp to only download audio from youtube videos<br />
156
- How to use the /r/youtubedl subreddit for more information and support<br />
157
- How to use the wikiHow guide on The 7 Best Free Tools to Download Reddit Videos with Sound<br />
158
- How to use the Business Insider guide on 2 Ways to Download Any Reddit Video<br />
159
- How to use the /r/software subreddit for more recommendations and reviews on youtube video downloaders<br />
160
- How to use the GitHub repository of yt-dlp for more details and updates on the tool<br />
161
- How to use the GitHub repository of yt-dlg for more details and updates on the GUI<br />
162
- How to use the GitHub repository of jely2002/youtube-dl-gui for another GUI option for yt-dlp or youtube-dl<br />
163
- How to use the GitHub repository of oleksis/youtube-dl-gui for another GUI option for yt-dlp or youtube-dl<br />
164
- How to use the Stacher subreddit's Wiki for more instructions and tips on using Stacher<br />
165
- How to use the Slide for Reddit app's settings and features for downloading reddit videos<br />
166
- How to use the /u/SaveVideo bot's commands and options for downloading reddit videos<br />
167
- How to use the Jdownloader2 app's settings and features for downloading youtube videos<br />
168
- How to use the YouTube Premium app's settings and features for offline viewing of youtube videos</p>
169
- <h2>FAQs</h2>
170
- <ul>
171
- <li><b>Q: Is it legal to download YouTube videos from Reddit?</b></li>
172
- <li>A: It depends on the content and the purpose of downloading. Generally, downloading YouTube videos for personal use is not illegal, as long as you do not distribute or monetize them. However, some videos may be protected by copyright or other laws, and downloading them may violate the rights of the original creators. You should always check the terms of service of each platform and the license of each video before downloading.</li>
173
- <li><b>Q: How can I download YouTube videos from Reddit on a Mac or PC?</b></li>
174
- <li>A: You can use a desktop app, such as 4K Video Downloader, which is compatible with both Mac and PC. It allows you to download YouTube videos from Reddit in high quality and various formats. You can also use a web-based video downloader, such as RedditSave or Viddit.red, which work on any browser and device.</li>
175
- <li><b>Q: How can I download YouTube videos from Reddit without sound?</b></li>
176
- <li>A: You can use a web-based video downloader that offers an option to download videos without sound, such as Viddit.red. Alternatively, you can use a video converter tool, such as Online Video Converter, which lets you remove the audio track from any video file.</li>
177
- <li><b>Q: How can I download YouTube videos from Reddit in MP3 format?</b></li>
178
- <li>A: You can use a web-based video downloader that offers an option to download videos in MP3 format, such as RedditSave. Alternatively, you can use a video converter tool, such as Online Video Converter, which lets you convert any video file to MP3 format.</li>
179
- <li><b>Q: How can I download YouTube videos from Reddit faster?</b></li>
180
- <li>A: You can use a web-based video downloader that offers an option to download videos in lower quality or smaller size, such as RedditSave or Viddit.red. Alternatively, you can use a download manager tool, such as Internet Download Manager, which lets you accelerate and resume your downloads.</li>
181
- </ul></p> 197e85843d<br />
182
- <br />
183
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1phancelerku/anime-remove-background/Bitcoin Mining Simulator Idle Clicker Tycoon Mod APK.md DELETED
@@ -1,162 +0,0 @@
1
- <br />
2
- <h1>Bitcoin Mining Idle Tycoon Mod APK: A Fun and Educational Game for Crypto Enthusiasts</h1>
3
- <p>Are you interested in bitcoin mining and cryptocurrency? Do you want to learn how to mine bitcoins, trade them, and grow your virtual business? If yes, then you might want to check out Bitcoin Mining Idle Tycoon Mod APK, a fun and educational game that simulates the process of bitcoin mining. In this article, we will tell you what this game is, how to play it, what are its benefits and challenges, and how to download and install the mod apk version. Read on to find out more.</p>
4
- <h2>What is Bitcoin Mining Idle Tycoon Mod APK?</h2>
5
- <h3>A brief introduction to the game and its features</h3>
6
- <p>Bitcoin Mining Idle Tycoon Mod APK is a modified version of Bitcoin Mining Idle Tycoon, a game developed by Ernest Trosclair. The game is an idle clicker tycoon game that lets you start your own bitcoin mining business, hire workers, upgrade your equipment, trade your currency, and get rich. The game has many features that make it realistic and engaging, such as:</p>
7
- <h2>bitcoin mining idle tycoon mod apk</h2><br /><p><b><b>Download Zip</b> &#9889; <a href="https://jinyurl.com/2uNOzK">https://jinyurl.com/2uNOzK</a></b></p><br /><br />
8
- <ul>
9
- <li>A trade market where you can sell your mined currency instantly or keep it for better rates</li>
10
- <li>A variety of upgrades that can boost your profits and expand your business</li>
11
- <li>A goal to get a girlfriend once you get rich in the game</li>
12
- <li>A realistic simulation of bitcoin mining and cryptocurrency</li>
13
- </ul>
14
- <h3>How to download and install the mod apk version</h3>
15
- <p>The mod apk version of Bitcoin Mining Idle Tycoon is a modified version that gives you some extra benefits, such as:</p>
16
- <ul>
17
- <li>Modify advertising gain reward</li>
18
- <li>Unlimited money</li>
19
- <li>No ads</li>
20
- </ul>
21
- <p>To download and install the mod apk version, you need to follow these steps:</p>
22
- <ol>
23
- <li>Go to the download link and download the mod apk file.</li>
24
- <li>Enable unknown sources on your device settings.</li>
25
- <li>Locate the downloaded file on your file manager and tap on it.</li>
26
- <li>Follow the installation instructions on the screen.</li>
27
- <li>Launch the game and enjoy.</li>
28
- </ol>
29
- <h2>How to Play Bitcoin Mining Idle Tycoon Mod APK?</h2>
30
- <h3>The basics of bitcoin mining and the game mechanics</h3>
31
- <p>Bitcoin mining is the process of creating new bitcoins by solving complex mathematical problems that verify transactions on the blockchain network, which is a globally distributed public ledger consisting of a giant list of timestamped transactions. The network relies on the consensus of the miners to agree on the current state of the ledger and to prevent double-spending, which is when someone tries to spend the same bitcoin twice.</p>
32
- <h3>The different upgrades, workers, and equipment available in the game</h3>
33
- <p>In Bitcoin Mining Idle Tycoon Mod APK, you can upgrade your mining business by hiring more workers, buying better equipment, and increasing your hash rate. The hash rate is the measure of how fast your computer can solve the algorithms and earn bitcoins. The higher your hash rate, the more bitcoins you can mine.</p>
34
- <p>Some of the upgrades, workers, and equipment you can get in the game are:</p>
35
- <p>bitcoin mining idle tycoon hack apk<br />
36
- bitcoin mining idle tycoon unlimited coins apk<br />
37
- bitcoin mining idle tycoon mod apk download<br />
38
- bitcoin mining idle tycoon cheat apk<br />
39
- bitcoin mining idle tycoon latest mod apk<br />
40
- bitcoin mining idle tycoon mod apk android 1<br />
41
- bitcoin mining idle tycoon mod apk revdl<br />
42
- bitcoin mining idle tycoon mod apk 4.27.0<br />
43
- bitcoin mining idle tycoon mod apk free shopping<br />
44
- bitcoin mining idle tycoon mod apk happymod<br />
45
- bitcoin mining idle tycoon mod apk 2023<br />
46
- bitcoin mining idle tycoon mod apk offline<br />
47
- bitcoin mining idle tycoon mod apk no ads<br />
48
- bitcoin mining idle tycoon mod apk unlimited money<br />
49
- bitcoin mining idle tycoon mod apk rexdl<br />
50
- bitcoin mining idle tycoon mod apk 4.26.0<br />
51
- bitcoin mining idle tycoon mod apk 4.25.0<br />
52
- bitcoin mining idle tycoon mod apk 4.24.0<br />
53
- bitcoin mining idle tycoon mod apk 4.23.0<br />
54
- bitcoin mining idle tycoon mod apk 4.22.0<br />
55
- bitcoin mining idle tycoon pro mod apk<br />
56
- bitcoin mining idle tycoon premium mod apk<br />
57
- bitcoin mining idle tycoon vip mod apk<br />
58
- bitcoin mining idle tycoon mega mod apk<br />
59
- bitcoin mining idle tycoon super mod apk<br />
60
- download game bitcoin mining idle tycoon mod apk<br />
61
- download bitcoin mining idle tycoon hack mod apk<br />
62
- download bitcoin mining idle tycoon cheat mod apk<br />
63
- download bitcoin mining idle tycoon unlimited money mod apk<br />
64
- download bitcoin mining idle tycoon latest version mod apk<br />
65
- how to install bitcoin mining idle tycoon mod apk<br />
66
- how to play bitcoin mining idle tycoon mod apk<br />
67
- how to download bitcoin mining idle tycoon mod apk on pc<br />
68
- how to update bitcoin mining idle tycoon mod apk<br />
69
- how to get free coins in bitcoin mining idle tycoon mod apk<br />
70
- best tips for bitcoin mining idle tycoon mod apk<br />
71
- best strategy for bitcoin mining idle tycoon mod apk<br />
72
- best guide for bitcoin mining idle tycoon mod apk<br />
73
- best cheats for bitcoin mining idle tycoon mod apk<br />
74
- best hacks for bitcoin mining idle tycoon mod apk</p>
75
- <table>
76
- <tr>
77
- <th>Upgrade</th>
78
- <th>Description</th>
79
- <th>Cost</th>
80
- </tr>
81
- <tr>
82
- <td>Worker</td>
83
- <td>A person who works on your mining rig and earns bitcoins for you</td>
84
- <td>$100</td>
85
- </tr>
86
- <tr>
87
- <td>Graphics Card</td>
88
- <td>A device that enhances your computer's performance and increases your hash rate</td>
89
- <td>$500</td>
90
- </tr>
91
- <tr>
92
- <td>Cooling Fan</td>
93
- <td>A device that cools down your computer and prevents overheating and damage</td>
94
- <td>$200</td>
95
- </tr>
96
- <tr>
97
- <td>Power Supply</td>
98
- <td>A device that provides electricity to your computer and equipment</td>
99
- <td>$300</td>
100
- </tr>
101
- <tr>
102
- <td>ASIC Miner</td>
103
- <td>A specialized device that is designed for bitcoin mining and has a very high hash rate</td>
104
- <td>$10,000</td>
105
- </tr>
106
- <tr>
107
- <td>Data Center</td>
108
- <td>A large facility that houses many computers and equipment for bitcoin mining</td>
109
- <td>$100,000</td>
110
- </tr>
111
- <tr>
112
- <td>Solar Panel</td>
113
- <td>A device that generates renewable energy from the sun and reduces your electricity cost</td>
114
- <td>$50,000</td>
115
- </tr>
116
- <tr>
117
<td>Quantum Computer</td><td>A futuristic device that can solve algorithms in seconds and has an enormous hash rate</td><td>$1,000,000</td></tr></table><h3>The trade market and the strategies to sell or keep mined currency</h3><p>One of the most important aspects of Bitcoin Mining Idle Tycoon Mod APK is the trade market, where you can sell or keep your mined currency. The trade market shows you the current price of bitcoin in US dollars, as well as the historical price chart. You can choose to sell your bitcoins instantly at the current price, or wait for a better price in the future. However, you also have to consider the risk of price fluctuations and market crashes.</p><p>Some of the strategies you can use to sell or keep your mined currency are:</p><ul><li>Sell high, buy low: This is a basic principle of trading that means you should sell your bitcoins when the price is high and buy them back when the price is low. This way, you can increase your profits and accumulate more bitcoins.</li><li>HODL: This is a slang term that means to hold on to your bitcoins for a long time, regardless of the price changes. This strategy is based on the belief that bitcoin will eventually increase in value and become a global currency. However, this strategy also requires patience and confidence in the future of bitcoin.</li><li>Diversify: This is a strategy that means to invest in different types of assets, such as stocks, bonds, gold, or other cryptocurrencies. This way, you can reduce your risk and exposure to bitcoin's volatility and benefit from other opportunities in the market.</li></ul><h2>What are the Benefits of Bitcoin Mining Idle Tycoon Mod APK?</h2><h3>The educational value of learning about bitcoin mining and cryptocurrency</h3><p>One of the main benefits of Bitcoin Mining Idle Tycoon Mod APK is that it can teach you about bitcoin mining and cryptocurrency in a fun and interactive way.
You can learn about how bitcoin works, how it is created, how it is traded, and how it is secured. You can also learn about the history and evolution of bitcoin, as well as its advantages and disadvantages. By playing this game, you can gain a better understanding of one of the most innovative and influential technologies of our time.</p><h3>The entertainment value of managing a virtual mining business and getting rich</h3><p>Another benefit of Bitcoin Mining Idle Tycoon Mod APK is that it can provide you with hours of entertainment and satisfaction. You can enjoy managing your own virtual mining business, hiring workers, buying equipment, upgrading your facilities, and earning bitcoins. You can also compete with other players in the global leaderboard, and see how you rank among the best bitcoin miners in the world. You can also have fun with the humorous and witty dialogues, graphics, and sound effects in the game. You can feel the thrill of getting rich and achieving your goals in the game.</p>
118
- <h3>The mod apk features that enhance the gaming experience and remove ads</h3>
119
- <p>A final benefit of Bitcoin Mining Idle Tycoon Mod APK is that it offers some extra features that enhance the gaming experience and remove ads. The mod apk version gives you unlimited money, which means you can buy any upgrade, worker, or equipment you want without worrying about the cost. You can also modify the advertising gain reward, which means you can get more bitcoins from watching ads. Moreover, you can enjoy the game without any annoying ads that interrupt your gameplay or consume your data.</p>
120
- <h2>What are the Challenges of Bitcoin Mining Idle Tycoon Mod APK?</h2>
121
- <h3>The increasing difficulty and competition of mining as the game progresses</h3>
122
- <p>One of the challenges of Bitcoin Mining Idle Tycoon Mod APK is that it becomes more difficult and competitive as the game progresses. The game follows the real-life scenario of bitcoin mining, which means that the algorithms become harder to solve over time, and the reward for each block decreases. This means that you need to invest more money and resources to maintain your hash rate and profits. You also need to compete with other players who are also mining bitcoins and trying to get a share of the limited supply.</p>
123
- <h3>The risk of cryptojacking and malware from downloading untrusted sources</h3>
124
- <p>Another challenge of Bitcoin Mining Idle Tycoon Mod APK is that it poses a risk of cryptojacking and malware from downloading untrusted sources. Cryptojacking is a malicious practice where hackers use your device's processing power to mine cryptocurrency without your consent or knowledge. Malware is a software that can harm your device or data by stealing, deleting, encrypting, or spying on them. These threats can affect your device's performance, battery life, security, and privacy. Therefore, you need to be careful when downloading and installing the mod apk version from unknown sources, and always scan your device for any potential infections.</p>
125
- <h3>The legal and ethical issues of bitcoin mining and its environmental impact</h3>
126
- <p>A final challenge of Bitcoin Mining Idle Tycoon Mod APK is that it raises some legal and ethical issues of bitcoin mining and its environmental impact. Bitcoin mining is not regulated or controlled by any central authority, which means that it can be used for illegal or unethical purposes, such as money laundering, tax evasion, terrorism financing, or drug trafficking. Bitcoin mining also consumes a lot of electricity and generates a lot of carbon emissions, which contributes to global warming and climate change. Therefore, you need to be aware of these issues and consider their implications when playing this game.</p>
127
- <h2>Conclusion</h2>
128
- <p>Bitcoin Mining Idle Tycoon Mod APK is a fun and educational game that simulates the process of bitcoin mining. You can learn how to mine bitcoins, trade them, and grow your virtual business. You can also enjoy managing your own mining business, hiring workers, buying equipment, upgrading your facilities, and earning bitcoins. You can also benefit from the mod apk features that give you unlimited money, modify advertising gain reward, and remove ads. However, you also need to face some challenges, such as the increasing difficulty and competition of mining, the risk of cryptojacking and malware from downloading untrusted sources, and the legal and ethical issues of bitcoin mining and its environmental impact. If you are interested in bitcoin mining and cryptocurrency, you might want to try this game and see how it works.</p>
129
- <h2>FAQs</h2>
130
- <h3>Q1: Is Bitcoin Mining Idle Tycoon Mod APK safe to download and play?</h3>
131
- <p>A1: Bitcoin Mining Idle Tycoon Mod APK is generally safe to download and play if you get it from a trusted source. However, there is always a risk of cryptojacking and malware from downloading untrusted sources. Therefore, you should always scan your device for any potential infections before installing the mod apk version.</p>
132
- <h3>Q2: How much real money can I earn from playing Bitcoin Mining Idle Tycoon Mod APK?</h3>
133
- <p>A2: Bitcoin Mining Idle Tycoon Mod APK is a game that simulates bitcoin mining. You cannot earn real money from playing this game. The bitcoins you mine in the game are virtual currency that only exist in the game. However, you can learn about how bitcoin mining works in real life by playing this game.</p>
134
- <h3>Q3: What are some tips and tricks to succeed in Bitcoin Mining Idle Tycoon Mod APK?</h3>
135
- <p>A3: Some tips and tricks to succeed in Bitcoin Mining Idle Tycoon Mod APK are:</p>
136
- <ul>
137
- <li>Hire more workers to increase your hash rate and profits</li>
138
- <li>Buy better equipment to improve your performance and efficiency</li>
139
- <li>Upgrade your facilities to expand your business and attract more customers</li>
140
- <li>Sell your bitcoins at the right time to maximize your earnings</li>
141
- <li>Keep an eye on the trade market and the price fluctuations</li>
142
- <li>Watch ads to get extra rewards and bonuses</li>
143
- <li>Use the mod apk features to get unlimited money and remove ads</li>
144
- </ul>
145
- <h3>Q4: What are some alternatives to Bitcoin Mining Idle Tycoon Mod APK?</h3>
146
- <p>A4: Some alternatives to Bitcoin Mining Idle Tycoon Mod APK are:</p>
147
- <ul>
148
- <li>Bitcoin Billionaire: A game that lets you tap your screen to mine bitcoins, build a fortune, and invest in various businesses and technologies.</li>
149
- <li>Crypto Idle Miner: A game that lets you build your own crypto mining empire, hire managers, upgrade your equipment, and trade various cryptocurrencies.</li>
150
- <li>Idle Miner Tycoon: A game that lets you manage your own mining company, mine different resources, hire workers, and optimize your workflow.</li>
151
- </ul>
152
- <h3>Q5: How can I learn more about bitcoin mining and cryptocurrency?</h3>
153
- <p>A5: Some ways to learn more about bitcoin mining and cryptocurrency are:</p>
154
- <ul>
155
- <li>Read books, articles, blogs, and podcasts about bitcoin and cryptocurrency.</li>
156
- <li>Watch videos, documentaries, and tutorials about bitcoin and cryptocurrency.</li>
157
- <li>Join online forums, communities, and groups related to bitcoin and cryptocurrency.</li>
158
- <li>Take online courses, webinars, or workshops about bitcoin and cryptocurrency.</li>
159
- <li>Consult experts, mentors, or advisors who have experience in bitcoin and cryptocurrency.</li>
160
- </ul></p> 401be4b1e0<br />
161
- <br />
162
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/1phancelerku/anime-remove-background/Download Index of Cricket League Mod APK v1.0.5 for Android - Unlimited Coins and Gems.md DELETED
@@ -1,124 +0,0 @@
1
- <br />
2
- <h1>Index of Cricket League Mod APK: How to Download and Play the Best Cricket Game on Your Android Device</h1>
3
- <h2>Introduction</h2>
4
- <p>If you are a fan of cricket, you must have heard of Cricket League, one of the most popular and realistic cricket games on the Google Play Store. However, if you want to enjoy the game to the fullest, you might need to spend some real money to unlock premium features, such as unlimited coins, all players unlocked, no ads, and more. That's why many people are looking for the modded version of Cricket League, which gives them access to all these benefits for free.</p>
5
- <h2>index of cricket league mod apk</h2><br /><p><b><b>Download</b> &ndash;&ndash;&ndash;&ndash;&ndash;>>> <a href="https://jinyurl.com/2uNTox">https://jinyurl.com/2uNTox</a></b></p><br /><br />
6
- <p>In this article, we will show you how to download and install Cricket League Mod APK on your Android device, and how to play the game with all the features unlocked. We will also answer some frequently asked questions about the game and the modded file. So, without further ado, let's get started!</p>
7
- <h2>What is Cricket League Mod APK?</h2>
8
- <p>Cricket League Mod APK is a modified version of the original Cricket League game, which is developed by Gametion Technologies Pvt Ltd. The modded file has been hacked by some third-party developers to provide users with unlimited money, all players unlocked, no ads, and other premium features that are otherwise not available in the official game.</p>
9
- <p>With Cricket League Mod APK, you can enjoy playing cricket with your favorite teams and players, without worrying about running out of coins or being interrupted by annoying ads. You can also join different tournaments and leagues, unlock new stadiums and rewards, and experience realistic 3D graphics and sound effects.</p>
10
- <h2>Why should you download Cricket League Mod APK?</h2>
11
- <p>There are many reasons why you should download Cricket League Mod APK instead of the original game. Here are some of them:</p>
12
- <ul>
13
- <li>You can save your money by getting unlimited coins for free. You can use these coins to buy new players, upgrade your skills, and customize your team.</li>
14
- <li>You can unlock all the players in the game, including legendary cricketers like Sachin Tendulkar, Virat Kohli, MS Dhoni, AB de Villiers, and more. You can also create your own dream team with your favorite players.</li>
15
- <li>You can play the game without any ads. Ads can be very annoying and distracting when you are playing a game. They can also slow down your device and consume your data. With Cricket League Mod APK, you can enjoy a smooth and ad-free gaming experience.</li>
16
- <li>You can access all the modes and tournaments in the game, such as Quick Match, World Cup, IPL, PSL, BBL, CPL, and more. You can also play online with your friends or other players from around the world.</li>
17
- <li>You can unlock new stadiums and rewards as you progress in the game. You can play in different venues like Eden Gardens, Wankhede Stadium, Lord's, MCG, SCG, etc. You can also win trophies, medals, badges, and other prizes.</li>
18
- <li>You can enjoy realistic 3D graphics and sound effects that make you feel like you are playing in a real cricket match. You can also customize your camera angles, graphics settings, sound effects, etc.</li>
19
- </ul>
20
- <h2>How to download and install Cricket League Mod APK?</h2>
21
- <p>Downloading and installing Cricket League Mod APK is very easy and simple. Just follow these steps:</p>
22
- <h3>Step 1: Find a reliable source for the modded file</h3>
23
- <p>The first thing you need to do is to find a trustworthy website that provides the modded file for Cricket League. You can use Google or any other search engine to find a reliable source for the modded file. You can also check the reviews and ratings of the website to see if it is safe and secure. Some of the websites that offer Cricket League Mod APK are:</p>
24
- <ul>
25
- <li>[APKPure]</li>
26
- <li>[APKHome]</li>
27
- <li>[ModDroid]</li>
28
- <li>[APKDone]</li>
29
- </ul>
30
- <p>Make sure you download the latest version of the modded file, which is 1.0.9 as of June 2023.</p>
31
- <h3>Step 2: Enable unknown sources on your device</h3>
32
- <p>The next thing you need to do is to enable unknown sources on your device. This will allow you to install apps that are not from the Google Play Store. To do this, follow these steps:</p>
33
- <p>Cricket League v1.0.5 mod apk download<br />
34
- How to install Cricket League mod apk on Android<br />
35
- Cricket League mod apk unlimited money and coins<br />
36
- Cricket League 3D multiplayer mod apk latest version<br />
37
- Cricket League mod apk by Miniclip Com<br />
38
- Cricket League mod apk free download for Android<br />
39
- Cricket League mod apk offline mode<br />
40
- Cricket League mod apk hack and cheats<br />
41
- Cricket League mod apk with all teams unlocked<br />
42
- Cricket League mod apk no root required<br />
43
- Cricket League mod apk with real-time commentary<br />
44
- Cricket League mod apk with realistic graphics and physics<br />
45
- Cricket League mod apk with custom tournaments and leagues<br />
46
- Cricket League mod apk with online leaderboards and achievements<br />
47
- Cricket League mod apk with easy controls and gameplay<br />
48
- Cricket League mod apk with different game modes and difficulty levels<br />
49
- Cricket League mod apk with HD quality sound and music<br />
50
- Cricket League mod apk with daily rewards and challenges<br />
51
- Cricket League mod apk with in-app purchases and ads removed<br />
52
- Cricket League mod apk with bug fixes and performance improvements<br />
53
- Cricket League mod apk for PC and laptop<br />
54
- Cricket League mod apk for iOS and iPhone<br />
55
- Cricket League mod apk for Windows 10 and Mac OS<br />
56
- Cricket League mod apk for Firestick and Smart TV<br />
57
- Cricket League mod apk for Chromebook and Linux<br />
58
- Cricket League mod apk reviews and ratings<br />
59
- Cricket League mod apk tips and tricks<br />
60
- Cricket League mod apk FAQs and guides<br />
61
- Cricket League mod apk features and specifications<br />
62
- Cricket League mod apk comparison and alternatives<br />
63
- Download link of Cricket League mod apk file<br />
64
- How to update Cricket League mod apk to the latest version<br />
65
- How to uninstall Cricket League mod apk from your device<br />
66
- How to backup and restore your data in Cricket League mod apk<br />
67
- How to play Cricket League mod apk with friends online<br />
68
- How to join a clan or create your own in Cricket League mod apk<br />
69
- How to customize your avatar and team in Cricket League mod apk<br />
70
- How to earn more money and coins in Cricket League mod apk<br />
71
- How to unlock new teams and players in Cricket League mod apk<br />
72
- How to improve your skills and strategy in Cricket League mod apk</p>
73
- <ol>
74
- <li>Go to your device settings and tap on security or privacy.</li>
75
- <li>Find the option that says unknown sources or install unknown apps and toggle it on.</li>
76
- <li>A warning message will pop up, telling you that installing apps from unknown sources can harm your device. Tap on OK or Allow to proceed.</li>
77
- </ol>
78
- <p>You can also enable unknown sources for specific apps, such as your browser or file manager, by tapping on their names and toggling on the option that says allow from this source.</p>
79
- <h3>Step 3: Download and install the APK file</h3>
80
- <p>The final step is to download and install the APK file on your device. To do this, follow these steps:</p>
81
- <ol>
82
- <li>Open your browser or file manager and go to the website where you downloaded the modded file.</li>
83
- <li>Tap on the download button or link and wait for the file to be downloaded.</li>
84
- <li>Once the download is complete, tap on the file name or open it with your file manager.</li>
85
- <li>A prompt will appear, asking you if you want to install the app. Tap on Install and wait for the installation to finish.</li>
86
- <li>Once the installation is done, tap on Open or Done to launch the game or exit the installer.</li>
87
- </ol>
88
- <p>Congratulations! You have successfully downloaded and installed Cricket League Mod APK on your Android device. You can now enjoy playing the game with all the features unlocked.</p>
89
- <h2>How to play Cricket League Mod APK?</h2>
90
- <p>Playing Cricket League Mod APK is very easy and fun. Here are some tips on how to play the game:</p>
91
- <h3>Choose your team and players</h3>
92
- <p>The first thing you need to do is to choose your team and players. You can select from different countries, such as India, Australia, England, Pakistan, South Africa, etc. You can also create your own custom team with your favorite players. You can edit their names, skills, appearances, etc.</p>
93
- <p>You can also unlock all the players in the game, including legendary cricketers like Sachin Tendulkar, Virat Kohli, MS Dhoni, AB de Villiers, and more. You can also create your own dream team with your favorite players.</p>
94
- <h3>Play different modes and tournaments</h3>
95
- <p>The next thing you need to do is to play different modes and tournaments in the game. You can choose from different options, such as Quick Match, World Cup, IPL, PSL, BBL, CPL, and more. You can also play online with your friends or other players from around the world.</p>
96
- <p>You can also customize your match settings, such as overs, difficulty level, toss, pitch condition, weather, etc. You can also view your match statistics, such as scorecard, wagon wheel, man of the match, etc.</p>
97
- <h3>Unlock new stadiums and rewards</h3>
98
- <p>The last thing you need to do is to unlock new stadiums and rewards as you progress in the game. You can play in different venues like Eden Gardens, Wankhede Stadium, Lord's, MCG, SCG, etc. You can also win trophies, medals, badges, and other prizes.</p>
99
- <p>You can also unlock new features and items in the game store using your unlimited coins. You can buy new bats, balls, gloves, helmets, shoes, etc. You can also upgrade your skills and abilities using your coins.</p>
100
- <h3>Enjoy realistic graphics and sound effects</h3>
101
- <p>The best thing about Cricket League Mod APK is that it has realistic 3D graphics and sound effects that make you feel like you are playing in a real cricket match. You can also customize your camera angles, graphics settings, sound effects, etc. You can also enjoy the commentary and crowd cheering that add to the excitement of the game.</p>
102
- <h2>Conclusion</h2>
103
- <p>Cricket League Mod APK is a great game for cricket lovers who want to enjoy the game with all the features unlocked. You can download and install the modded file on your Android device easily and safely, and play the game with unlimited coins, all players unlocked, no ads, and other premium features. You can also play different modes and tournaments, unlock new stadiums and rewards, and enjoy realistic graphics and sound effects.</p>
104
- <p>If you are looking for a fun and realistic cricket game on your Android device, you should definitely try Cricket League Mod APK. It is one of the best cricket games on the Google Play Store, and it will give you hours of entertainment and enjoyment.</p>
105
- <h2>FAQs</h2>
106
- <p>Here are some frequently asked questions about Cricket League Mod APK:</p>
107
- <h3>Q: Is Cricket League Mod APK safe to download and install?</h3>
108
- <p>A: Yes, Cricket League Mod APK is safe to download and install, as long as you get it from a reliable source. However, you should always be careful when downloading apps from unknown sources, as they may contain viruses or malware that can harm your device. You should also scan the file with an antivirus app before installing it.</p>
109
- <h3>Q: Do I need to root my device to use Cricket League Mod APK?</h3>
110
- <p>A: No, you do not need to root your device to use Cricket League Mod APK. The modded file works on both rooted and non-rooted devices. However, some features may require root access, such as changing the IMEI number or spoofing your location.</p>
111
- <h3>Q: Will I get banned from playing online if I use Cricket League Mod APK?</h3>
112
- <p>A: No, you will not get banned from playing online if you use Cricket League Mod APK. The modded file has an anti-ban feature that prevents the game server from detecting your modded file. However, you should not abuse the modded features or cheat in online matches, as that may ruin the fun for other players.</p>
113
- <h3>Q: How can I update Cricket League Mod APK?</h3>
114
- <p>A: You can update Cricket League Mod APK by downloading the latest version of the modded file from the same website where you got it. You can also check for updates within the game settings. However, you should always backup your game data before updating, as some updates may cause compatibility issues or data loss.</p>
115
- <h3>Q: How can I uninstall Cricket League Mod APK?</h3>
116
- <p>A: You can uninstall Cricket League Mod APK by following these steps:</p>
117
- <ol>
118
- <li>Go to your device settings and tap on apps or applications.</li>
119
- <li>Find and tap on Cricket League Mod APK.</li>
120
- <li>Tap on uninstall and confirm your action.</li>
121
- <li>Wait for the app to be uninstalled from your device.</li>
122
- </ol></p> 401be4b1e0<br />
123
- <br />
124
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/4Taps/SadTalker/src/facerender/modules/generator.py DELETED
@@ -1,251 +0,0 @@
1
- import torch
2
- from torch import nn
3
- import torch.nn.functional as F
4
- from src.facerender.modules.util import ResBlock2d, SameBlock2d, UpBlock2d, DownBlock2d, ResBlock3d, SPADEResnetBlock
5
- from src.facerender.modules.dense_motion import DenseMotionNetwork
6
-
7
-
8
class OcclusionAwareGenerator(nn.Module):
    """
    Occlusion-aware image generator (NVIDIA face-vid2vid style architecture).

    Pipeline: encode the source image to a 2D feature map, lift it into a 3D
    feature volume, warp the volume with the dense motion field predicted from
    source/driving keypoints, apply the predicted occlusion map, then decode
    back to an RGB image in [0, 1].
    """

    def __init__(self, image_channel, feature_channel, num_kp, block_expansion, max_features, num_down_blocks, reshape_channel, reshape_depth,
                 num_resblocks, estimate_occlusion_map=False, dense_motion_params=None, estimate_jacobian=False):
        super(OcclusionAwareGenerator, self).__init__()

        # Dense-motion sub-network: predicts the deformation grid (and
        # optionally an occlusion map) from keypoints. Optional so the
        # generator can also run as a plain reconstruction autoencoder.
        if dense_motion_params is not None:
            self.dense_motion_network = DenseMotionNetwork(num_kp=num_kp, feature_channel=feature_channel,
                                                           estimate_occlusion_map=estimate_occlusion_map,
                                                           **dense_motion_params)
        else:
            self.dense_motion_network = None

        self.first = SameBlock2d(image_channel, block_expansion, kernel_size=(7, 7), padding=(3, 3))

        # 2D encoder: channel count doubles per level, capped at max_features.
        down_blocks = []
        for i in range(num_down_blocks):
            in_features = min(max_features, block_expansion * (2 ** i))
            out_features = min(max_features, block_expansion * (2 ** (i + 1)))
            down_blocks.append(DownBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1)))
        self.down_blocks = nn.ModuleList(down_blocks)

        # NOTE: relies on `out_features` leaking out of the loop above,
        # i.e. num_down_blocks must be >= 1.
        self.second = nn.Conv2d(in_channels=out_features, out_channels=max_features, kernel_size=1, stride=1)

        self.reshape_channel = reshape_channel
        self.reshape_depth = reshape_depth

        # 3D residual blocks applied to the lifted feature volume.
        self.resblocks_3d = torch.nn.Sequential()
        for i in range(num_resblocks):
            self.resblocks_3d.add_module('3dr' + str(i), ResBlock3d(reshape_channel, kernel_size=3, padding=1))

        out_features = block_expansion * (2 ** (num_down_blocks))
        self.third = SameBlock2d(max_features, out_features, kernel_size=(3, 3), padding=(1, 1), lrelu=True)
        self.fourth = nn.Conv2d(in_channels=out_features, out_channels=out_features, kernel_size=1, stride=1)

        # 2D residual blocks used at the start of the decoding stage.
        self.resblocks_2d = torch.nn.Sequential()
        for i in range(num_resblocks):
            self.resblocks_2d.add_module('2dr' + str(i), ResBlock2d(out_features, kernel_size=3, padding=1))

        # 2D decoder mirroring the encoder (channel count halves per level).
        up_blocks = []
        for i in range(num_down_blocks):
            in_features = max(block_expansion, block_expansion * (2 ** (num_down_blocks - i)))
            out_features = max(block_expansion, block_expansion * (2 ** (num_down_blocks - i - 1)))
            up_blocks.append(UpBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1)))
        self.up_blocks = nn.ModuleList(up_blocks)

        self.final = nn.Conv2d(block_expansion, image_channel, kernel_size=(7, 7), padding=(3, 3))
        self.estimate_occlusion_map = estimate_occlusion_map
        self.image_channel = image_channel

    def deform_input(self, inp, deformation):
        """Warp the 3D feature volume `inp` with the sampling grid `deformation`.

        The grid is resized (trilinear) to `inp`'s spatial size when they
        differ. NOTE(review): `grid_sample` is called without an explicit
        `align_corners`; the implicit default must match whatever the
        pretrained weights were trained with — confirm before pinning it.
        """
        _, d_old, h_old, w_old, _ = deformation.shape
        _, _, d, h, w = inp.shape
        if d_old != d or h_old != h or w_old != w:
            # Grid is channels-last (B, D, H, W, 3); move the coordinate axis
            # to channel position for interpolation, then move it back.
            deformation = deformation.permute(0, 4, 1, 2, 3)
            deformation = F.interpolate(deformation, size=(d, h, w), mode='trilinear')
            deformation = deformation.permute(0, 2, 3, 4, 1)
        return F.grid_sample(inp, deformation)

    def forward(self, source_image, kp_driving, kp_source):
        """Generate the driven image.

        Returns a dict with 'prediction' and, when the dense-motion network is
        enabled, 'mask' and (optionally) 'occlusion_map'.
        """
        # Encoding (downsampling) part
        out = self.first(source_image)
        for i in range(len(self.down_blocks)):
            out = self.down_blocks[i](out)
        out = self.second(out)
        bs, c, h, w = out.shape
        # Lift 2D features to a (C, D, H, W) volume; assumes
        # c == reshape_channel * reshape_depth.
        feature_3d = out.view(bs, self.reshape_channel, self.reshape_depth, h, w)
        feature_3d = self.resblocks_3d(feature_3d)

        # Transforming feature representation according to deformation and occlusion
        output_dict = {}
        if self.dense_motion_network is not None:
            dense_motion = self.dense_motion_network(feature=feature_3d, kp_driving=kp_driving,
                                                     kp_source=kp_source)
            output_dict['mask'] = dense_motion['mask']

            if 'occlusion_map' in dense_motion:
                occlusion_map = dense_motion['occlusion_map']
                output_dict['occlusion_map'] = occlusion_map
            else:
                occlusion_map = None
            deformation = dense_motion['deformation']
            out = self.deform_input(feature_3d, deformation)

            bs, c, d, h, w = out.shape
            out = out.view(bs, c * d, h, w)  # collapse depth back into channels
            out = self.third(out)
            out = self.fourth(out)

            if occlusion_map is not None:
                # Occlusion map may be predicted at a coarser resolution.
                if out.shape[2] != occlusion_map.shape[2] or out.shape[3] != occlusion_map.shape[3]:
                    occlusion_map = F.interpolate(occlusion_map, size=out.shape[2:], mode='bilinear')
                out = out * occlusion_map

        # output_dict["deformed"] = self.deform_input(source_image, deformation) # 3d deformation cannot deform 2d image

        # Decoding part
        out = self.resblocks_2d(out)
        for i in range(len(self.up_blocks)):
            out = self.up_blocks[i](out)
        out = self.final(out)
        # Fix: `F.sigmoid` has been deprecated since PyTorch 0.4.1;
        # `torch.sigmoid` is numerically identical.
        out = torch.sigmoid(out)

        output_dict["prediction"] = out

        return output_dict
118
-
119
-
120
class SPADEDecoder(nn.Module):
    """SPADE-based decoder.

    Maps a 256-channel feature map to an RGB image in [0, 1]. The input
    feature map itself is reused as the SPADE conditioning ("segmentation")
    signal for every resnet block.
    """

    def __init__(self):
        super().__init__()
        ic = 256   # input / conditioning channel count
        oc = 64    # channel count before the final RGB projection
        norm_G = 'spadespectralinstance'
        label_nc = 256  # channels of the SPADE conditioning map

        self.fc = nn.Conv2d(ic, 2 * ic, 3, padding=1)
        self.G_middle_0 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.G_middle_1 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.G_middle_2 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.G_middle_3 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.G_middle_4 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.G_middle_5 = SPADEResnetBlock(2 * ic, 2 * ic, norm_G, label_nc)
        self.up_0 = SPADEResnetBlock(2 * ic, ic, norm_G, label_nc)
        self.up_1 = SPADEResnetBlock(ic, oc, norm_G, label_nc)
        self.conv_img = nn.Conv2d(oc, 3, 3, padding=1)
        self.up = nn.Upsample(scale_factor=2)

    def forward(self, feature):
        """Decode `feature` (B, 256, H, W) to an RGB image (B, 3, 4H, 4W)."""
        # The feature map doubles as the SPADE conditioning input.
        seg = feature
        x = self.fc(feature)
        x = self.G_middle_0(x, seg)
        x = self.G_middle_1(x, seg)
        x = self.G_middle_2(x, seg)
        x = self.G_middle_3(x, seg)
        x = self.G_middle_4(x, seg)
        x = self.G_middle_5(x, seg)
        x = self.up(x)
        x = self.up_0(x, seg)  # 256, 128, 128
        x = self.up(x)
        x = self.up_1(x, seg)  # 64, 256, 256

        x = self.conv_img(F.leaky_relu(x, 2e-1))
        # x = torch.tanh(x)
        # Fix: `F.sigmoid` has been deprecated since PyTorch 0.4.1;
        # `torch.sigmoid` is numerically identical.
        x = torch.sigmoid(x)

        return x
159
-
160
-
161
class OcclusionAwareSPADEGenerator(nn.Module):
    """Variant of OcclusionAwareGenerator that decodes with a SPADE decoder
    instead of 2D residual blocks + upsampling convolutions.

    Encoder / feature-volume / warping stages are identical to
    OcclusionAwareGenerator; only the decoding stage differs.
    """

    def __init__(self, image_channel, feature_channel, num_kp, block_expansion, max_features, num_down_blocks, reshape_channel, reshape_depth,
                 num_resblocks, estimate_occlusion_map=False, dense_motion_params=None, estimate_jacobian=False):
        super(OcclusionAwareSPADEGenerator, self).__init__()

        # Dense-motion sub-network: predicts the deformation grid (and
        # optionally an occlusion map) from keypoints; optional.
        if dense_motion_params is not None:
            self.dense_motion_network = DenseMotionNetwork(num_kp=num_kp, feature_channel=feature_channel,
                                                           estimate_occlusion_map=estimate_occlusion_map,
                                                           **dense_motion_params)
        else:
            self.dense_motion_network = None

        # NOTE: 3x3 first conv here (the non-SPADE generator uses 7x7).
        self.first = SameBlock2d(image_channel, block_expansion, kernel_size=(3, 3), padding=(1, 1))

        # 2D encoder: channel count doubles per level, capped at max_features.
        down_blocks = []
        for i in range(num_down_blocks):
            in_features = min(max_features, block_expansion * (2 ** i))
            out_features = min(max_features, block_expansion * (2 ** (i + 1)))
            down_blocks.append(DownBlock2d(in_features, out_features, kernel_size=(3, 3), padding=(1, 1)))
        self.down_blocks = nn.ModuleList(down_blocks)

        # NOTE: relies on `out_features` leaking from the loop above,
        # i.e. num_down_blocks must be >= 1.
        self.second = nn.Conv2d(in_channels=out_features, out_channels=max_features, kernel_size=1, stride=1)

        self.reshape_channel = reshape_channel
        self.reshape_depth = reshape_depth

        # 3D residual blocks applied to the lifted feature volume.
        self.resblocks_3d = torch.nn.Sequential()
        for i in range(num_resblocks):
            self.resblocks_3d.add_module('3dr' + str(i), ResBlock3d(reshape_channel, kernel_size=3, padding=1))

        out_features = block_expansion * (2 ** (num_down_blocks))
        self.third = SameBlock2d(max_features, out_features, kernel_size=(3, 3), padding=(1, 1), lrelu=True)
        self.fourth = nn.Conv2d(in_channels=out_features, out_channels=out_features, kernel_size=1, stride=1)

        self.estimate_occlusion_map = estimate_occlusion_map
        self.image_channel = image_channel

        # SPADE decoder replaces the resblocks_2d + up_blocks decoding path.
        self.decoder = SPADEDecoder()

    def deform_input(self, inp, deformation):
        """Warp the 3D feature volume `inp` with the sampling grid `deformation`.

        The grid is resized (trilinear) to `inp`'s spatial size when they
        differ. NOTE(review): `grid_sample` is called without an explicit
        `align_corners`; the implicit default must match the one the weights
        were trained with — confirm before changing.
        """
        _, d_old, h_old, w_old, _ = deformation.shape
        _, _, d, h, w = inp.shape
        if d_old != d or h_old != h or w_old != w:
            # Grid is channels-last (B, D, H, W, 3); move the coordinate axis
            # to channel position for interpolation, then move it back.
            deformation = deformation.permute(0, 4, 1, 2, 3)
            deformation = F.interpolate(deformation, size=(d, h, w), mode='trilinear')
            deformation = deformation.permute(0, 2, 3, 4, 1)
        return F.grid_sample(inp, deformation)

    def forward(self, source_image, kp_driving, kp_source):
        """Generate the driven image.

        Returns a dict with 'prediction' and, when the dense-motion network is
        enabled, 'mask' and (optionally) 'occlusion_map'.
        """
        # Encoding (downsampling) part
        out = self.first(source_image)
        for i in range(len(self.down_blocks)):
            out = self.down_blocks[i](out)
        out = self.second(out)
        bs, c, h, w = out.shape
        # print(out.shape)
        # Lift 2D features to a (C, D, H, W) volume; assumes
        # c == reshape_channel * reshape_depth.
        feature_3d = out.view(bs, self.reshape_channel, self.reshape_depth, h ,w)
        feature_3d = self.resblocks_3d(feature_3d)

        # Transforming feature representation according to deformation and occlusion
        output_dict = {}
        if self.dense_motion_network is not None:
            dense_motion = self.dense_motion_network(feature=feature_3d, kp_driving=kp_driving,
                                                     kp_source=kp_source)
            output_dict['mask'] = dense_motion['mask']

            if 'occlusion_map' in dense_motion:
                occlusion_map = dense_motion['occlusion_map']
                output_dict['occlusion_map'] = occlusion_map
            else:
                occlusion_map = None
            deformation = dense_motion['deformation']
            out = self.deform_input(feature_3d, deformation)

            bs, c, d, h, w = out.shape
            out = out.view(bs, c*d, h, w)  # collapse depth back into channels
            out = self.third(out)
            out = self.fourth(out)

            if occlusion_map is not None:
                # Occlusion map may be predicted at a coarser resolution.
                if out.shape[2] != occlusion_map.shape[2] or out.shape[3] != occlusion_map.shape[3]:
                    occlusion_map = F.interpolate(occlusion_map, size=out.shape[2:], mode='bilinear')
                out = out * occlusion_map

        # Decoding part
        out = self.decoder(out)

        output_dict["prediction"] = out

        return output_dict
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/801artistry/RVC801/demucs/test.py DELETED
@@ -1,109 +0,0 @@
1
- # Copyright (c) Facebook, Inc. and its affiliates.
2
- # All rights reserved.
3
- #
4
- # This source code is licensed under the license found in the
5
- # LICENSE file in the root directory of this source tree.
6
-
7
- import gzip
8
- import sys
9
- from concurrent import futures
10
-
11
- import musdb
12
- import museval
13
- import torch as th
14
- import tqdm
15
- from scipy.io import wavfile
16
- from torch import distributed
17
-
18
- from .audio import convert_audio
19
- from .utils import apply_model
20
-
21
-
22
def evaluate(model,
             musdb_path,
             eval_folder,
             workers=2,
             device="cpu",
             rank=0,
             save=False,
             shifts=0,
             split=False,
             overlap=0.25,
             is_wav=False,
             world_size=1):
    """
    Evaluate `model` on the MUSDB test set with museval.

    Runs separation on a single device; the bottleneck is the call to
    museval, which is dispatched to a process pool when `workers` > 0.
    Per-track museval scores are written as gzipped JSON under
    ``eval_folder/results/test``; already-existing score files are skipped,
    so the function is resumable. With `world_size` > 1, each rank handles
    the tracks ``rank, rank + world_size, ...`` and all ranks synchronize on
    a barrier at the end.
    """

    output_dir = eval_folder / "results"
    output_dir.mkdir(exist_ok=True, parents=True)
    json_folder = eval_folder / "results/test"
    json_folder.mkdir(exist_ok=True, parents=True)

    # we load tracks from the original musdb set
    test_set = musdb.DB(musdb_path, subsets=["test"], is_wav=is_wav)
    src_rate = 44100  # MUSDB ships at 44.1 kHz; hardcoded for now...

    # Inference only: drop gradients so apply_model runs without autograd state.
    for p in model.parameters():
        p.requires_grad = False
        p.grad = None

    pendings = []
    with futures.ProcessPoolExecutor(workers or 1) as pool:
        for index in tqdm.tqdm(range(rank, len(test_set), world_size), file=sys.stdout):
            track = test_set.tracks[index]

            out = json_folder / f"{track.name}.json.gz"
            if out.exists():
                # Scores already computed on a previous (possibly aborted) run.
                continue

            mix = th.from_numpy(track.audio).t().float()
            ref = mix.mean(dim=0)  # mono mixture, used for normalization stats
            mix = (mix - ref.mean()) / ref.std()
            mix = convert_audio(mix, src_rate, model.samplerate, model.audio_channels)
            estimates = apply_model(model, mix.to(device),
                                    shifts=shifts, split=split, overlap=overlap)
            estimates = estimates * ref.std() + ref.mean()  # undo normalization

            estimates = estimates.transpose(1, 2)
            references = th.stack(
                [th.from_numpy(track.targets[name].audio).t() for name in model.sources])
            references = convert_audio(references, src_rate,
                                       model.samplerate, model.audio_channels)
            references = references.transpose(1, 2).numpy()
            estimates = estimates.cpu().numpy()
            # museval window/hop: one second at the model sample rate.
            win = int(1. * model.samplerate)
            hop = int(1. * model.samplerate)
            if save:
                folder = eval_folder / "wav/test" / track.name
                folder.mkdir(exist_ok=True, parents=True)
                for name, estimate in zip(model.sources, estimates):
                    # NOTE(review): written at 44100 regardless of
                    # model.samplerate — confirm they always match.
                    wavfile.write(str(folder / (name + ".wav")), 44100, estimate)

            if workers:
                pendings.append((track.name, pool.submit(
                    museval.evaluate, references, estimates, win=win, hop=hop)))
            else:
                pendings.append((track.name, museval.evaluate(
                    references, estimates, win=win, hop=hop)))
            # Free the large per-track buffers before moving on.
            del references, mix, estimates, track

        for track_name, pending in tqdm.tqdm(pendings, file=sys.stdout):
            if workers:
                pending = pending.result()
            sdr, isr, sir, sar = pending
            track_store = museval.TrackStore(win=44100, hop=44100, track_name=track_name)
            for idx, target in enumerate(model.sources):
                values = {
                    "SDR": sdr[idx].tolist(),
                    "SIR": sir[idx].tolist(),
                    "ISR": isr[idx].tolist(),
                    "SAR": sar[idx].tolist()
                }

                track_store.add_target(target_name=target, values=values)
            json_path = json_folder / f"{track_name}.json.gz"
            # Fix: close the gzip handle deterministically (the previous
            # one-liner leaked the file object and relied on GC to flush).
            with gzip.open(json_path, "w") as fout:
                fout.write(track_store.json.encode('utf-8'))
    if world_size > 1:
        distributed.barrier()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AI-Dashboards/HEDIS.Assessment.PHQ9.GADD7.SDoH/style.css DELETED
@@ -1,28 +0,0 @@
1
/* Base page layout. */
body {
    font-family: -apple-system, BlinkMacSystemFont, "Arial", sans-serif;
    padding: 2rem;
}

/* Compact page title. */
h1 {
    font-size: 16px;
    margin-top: 0;
}

/* Muted body copy. */
p {
    color: rgb(107, 114, 128);
    font-size: 15px;
    margin-bottom: 10px;
    margin-top: 5px;
}

/* Centered rounded card container. */
.card {
    border: 1px solid lightgray;
    border-radius: 16px;
    margin: 0 auto;
    max-width: 620px;
    padding: 16px;
}

/* No trailing gap after the card's last paragraph. */
.card p:last-child {
    margin-bottom: 0;
}
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AIGC-Audio/AudioGPT/text_to_speech/tasks/tts/synta.py DELETED
@@ -1,25 +0,0 @@
1
- import os
2
- import torch
3
- import torch.nn.functional as F
4
- from torch import nn
5
-
6
- from text_to_speech.modules.tts.syntaspeech.syntaspeech import SyntaSpeech
7
- from tasks.tts.ps_adv import PortaSpeechAdvTask
8
- from text_to_speech.utils.commons.hparams import hparams
9
-
10
-
11
class SyntaSpeechTask(PortaSpeechAdvTask):
    """TTS training task for SyntaSpeech; only model construction differs
    from the parent PortaSpeechAdvTask."""

    def build_tts_model(self):
        """Build the SyntaSpeech model and partition its trainable parameters
        into the groups consumed by the task's optimizers (generator,
        duration predictor, BERT, and their complements)."""
        ph_dict_size = len(self.token_encoder)
        word_dict_size = len(self.word_encoder)
        self.model = SyntaSpeech(ph_dict_size, word_dict_size, hparams)

        # Single pass over named_parameters() (the original iterated the
        # model five times); membership is decided by parameter name.
        self.gen_params = []
        self.dp_params = []
        self.gen_params_except_dp = []
        self.bert_params = []
        self.gen_params_except_bert_and_dp = []
        for name, p in self.model.named_parameters():
            if not p.requires_grad:
                continue
            self.gen_params.append(p)
            in_dp = 'dur_predictor' in name
            in_bert = 'bert' in name
            if in_dp:
                self.dp_params.append(p)
            else:
                self.gen_params_except_dp.append(p)
            if in_bert:
                self.bert_params.append(p)
            if not in_dp and not in_bert:
                self.gen_params_except_bert_and_dp.append(p)

        # Whether a fine-tunable BERT sub-module exists in the model.
        self.use_bert = len(self.bert_params) > 0
24
-
25
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AIGC-Audio/AudioGPT/text_to_speech/utils/audio/io.py DELETED
@@ -1,22 +0,0 @@
1
- import subprocess
2
-
3
- import numpy as np
4
- from scipy.io import wavfile
5
-
6
-
7
def save_wav(wav, path, sr, norm=False):
    """Write a floating-point waveform to 16-bit PCM WAV.

    Args:
        wav: float array of samples, nominally in [-1, 1].
        path: output path; a '.mp3' target is first written as WAV and then
            transcoded via `to_mp3`.
        sr: sample rate in Hz.
        norm: if True, peak-normalize to full scale first (no-op on silence).
    """
    wav = np.asarray(wav)
    if norm:
        peak = np.abs(wav).max()
        # Fix: guard the division — silent input previously produced NaNs.
        if peak > 0:
            wav = wav / peak
    # Fix: clip before casting so out-of-range samples saturate instead of
    # wrapping around in int16.
    pcm = np.clip(wav * 32767, -32768, 32767).astype(np.int16)
    # Strip a known 4-char extension; previously `path[:-4]` blindly chopped
    # four characters off extension-less paths.
    base = path[:-4] if path[-4:] in ('.wav', '.mp3') else path
    wavfile.write(base + '.wav', sr, pcm)
    if path[-4:] == '.mp3':
        to_mp3(base)
14
-
15
-
16
def to_mp3(out_path):
    """Transcode `<out_path>.wav` to `<out_path>.mp3` with ffmpeg, then delete
    the source WAV.

    `out_path` may be passed with or without a trailing '.wav'. Requires the
    `ffmpeg` binary on PATH; raises CalledProcessError on transcode failure.
    """
    import os
    from contextlib import suppress

    if out_path[-4:] == '.wav':
        out_path = out_path[:-4]
    # Fix: argument list with shell=False — the previous f-string shell
    # command broke on (and was injectable via) quotes/spaces in the path.
    subprocess.check_call(
        ['ffmpeg', '-threads', '1', '-loglevel', 'error',
         '-i', f'{out_path}.wav', '-vn', '-b:a', '192k',
         '-y', '-hide_banner', '-async', '1', f'{out_path}.mp3'],
        stdin=subprocess.PIPE)
    # Mirror the old `rm -f`: ignore an already-missing file.
    with suppress(FileNotFoundError):
        os.remove(f'{out_path}.wav')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AIGC-Audio/Make_An_Audio/ldm/modules/encoders/open_clap/transform.py DELETED
@@ -1,30 +0,0 @@
1
- from torchvision.transforms import Normalize, Compose, RandomResizedCrop, InterpolationMode, ToTensor, Resize, \
2
- CenterCrop
3
-
4
-
5
- def _convert_to_rgb(image):
6
- return image.convert('RGB')
7
-
8
-
9
- def image_transform(
10
- image_size: int,
11
- is_train: bool,
12
- mean=(0.48145466, 0.4578275, 0.40821073),
13
- std=(0.26862954, 0.26130258, 0.27577711)
14
- ):
15
- normalize = Normalize(mean=mean, std=std)
16
- if is_train:
17
- return Compose([
18
- RandomResizedCrop(image_size, scale=(0.9, 1.0), interpolation=InterpolationMode.BICUBIC),
19
- _convert_to_rgb,
20
- ToTensor(),
21
- normalize,
22
- ])
23
- else:
24
- return Compose([
25
- Resize(image_size, interpolation=InterpolationMode.BICUBIC),
26
- CenterCrop(image_size),
27
- _convert_to_rgb,
28
- ToTensor(),
29
- normalize,
30
- ])
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/AIWaves/SOP_Generation-single/Component/__init__.py DELETED
@@ -1,3 +0,0 @@
1
- from .ExtraComponent import *
2
- from .PromptComponent import *
3
- from .ToolComponent import *
 
 
 
 
spaces/AdvertisingAgency/README/README.md DELETED
@@ -1,10 +0,0 @@
1
- ---
2
- title: README
3
- emoji: 🐢
4
- colorFrom: indigo
5
- colorTo: blue
6
- sdk: static
7
- pinned: false
8
- ---
9
-
10
- Edit this `README.md` markdown file to author your organization card.
 
 
 
 
 
 
 
 
 
 
 
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/buildarcadeobject.d.ts DELETED
@@ -1,2 +0,0 @@
1
- import BuildArcadeObject from './utils/arcade/BuildArcadeObject';
2
- export default BuildArcadeObject;
 
 
 
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/spinner/clock/Factory.d.ts DELETED
@@ -1,6 +0,0 @@
1
- import Clock from './Clock';
2
- import Base from '../base/Base';
3
-
4
- export default function Factory(
5
- config?: Base.IConfig
6
- ): Clock;
 
 
 
 
 
 
 
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/lineprogress/LineProgress.d.ts DELETED
@@ -1,2 +0,0 @@
1
- import LineProgress from '../../../plugins/lineprogress';
2
- export default LineProgress;
 
 
 
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/.github/ISSUE_TEMPLATE/feature_request.md DELETED
@@ -1,20 +0,0 @@
1
- ---
2
- name: "\U0001F680 Feature request"
3
- about: Suggest an idea for this project
4
- title: ''
5
- labels: ''
6
- assignees: ''
7
-
8
- ---
9
-
10
- **Is your feature request related to a problem? Please describe.**
11
- A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
12
-
13
- **Describe the solution you'd like**
14
- A clear and concise description of what you want to happen.
15
-
16
- **Describe alternatives you've considered**
17
- A clear and concise description of any alternative solutions or features you've considered.
18
-
19
- **Additional context**
20
- Add any other context or screenshots about the feature request here.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Andy1621/uniformer_image_detection/mmdet/models/dense_heads/__init__.py DELETED
@@ -1,41 +0,0 @@
1
- from .anchor_free_head import AnchorFreeHead
2
- from .anchor_head import AnchorHead
3
- from .atss_head import ATSSHead
4
- from .cascade_rpn_head import CascadeRPNHead, StageCascadeRPNHead
5
- from .centripetal_head import CentripetalHead
6
- from .corner_head import CornerHead
7
- from .embedding_rpn_head import EmbeddingRPNHead
8
- from .fcos_head import FCOSHead
9
- from .fovea_head import FoveaHead
10
- from .free_anchor_retina_head import FreeAnchorRetinaHead
11
- from .fsaf_head import FSAFHead
12
- from .ga_retina_head import GARetinaHead
13
- from .ga_rpn_head import GARPNHead
14
- from .gfl_head import GFLHead
15
- from .guided_anchor_head import FeatureAdaption, GuidedAnchorHead
16
- from .ld_head import LDHead
17
- from .nasfcos_head import NASFCOSHead
18
- from .paa_head import PAAHead
19
- from .pisa_retinanet_head import PISARetinaHead
20
- from .pisa_ssd_head import PISASSDHead
21
- from .reppoints_head import RepPointsHead
22
- from .retina_head import RetinaHead
23
- from .retina_sepbn_head import RetinaSepBNHead
24
- from .rpn_head import RPNHead
25
- from .sabl_retina_head import SABLRetinaHead
26
- from .ssd_head import SSDHead
27
- from .transformer_head import TransformerHead
28
- from .vfnet_head import VFNetHead
29
- from .yolact_head import YOLACTHead, YOLACTProtonet, YOLACTSegmHead
30
- from .yolo_head import YOLOV3Head
31
-
32
- __all__ = [
33
- 'AnchorFreeHead', 'AnchorHead', 'GuidedAnchorHead', 'FeatureAdaption',
34
- 'RPNHead', 'GARPNHead', 'RetinaHead', 'RetinaSepBNHead', 'GARetinaHead',
35
- 'SSDHead', 'FCOSHead', 'RepPointsHead', 'FoveaHead',
36
- 'FreeAnchorRetinaHead', 'ATSSHead', 'FSAFHead', 'NASFCOSHead',
37
- 'PISARetinaHead', 'PISASSDHead', 'GFLHead', 'CornerHead', 'YOLACTHead',
38
- 'YOLACTSegmHead', 'YOLACTProtonet', 'YOLOV3Head', 'PAAHead',
39
- 'SABLRetinaHead', 'CentripetalHead', 'VFNetHead', 'TransformerHead',
40
- 'StageCascadeRPNHead', 'CascadeRPNHead', 'EmbeddingRPNHead', 'LDHead'
41
- ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Andy1621/uniformer_image_segmentation/configs/danet/danet_r101-d8_512x1024_40k_cityscapes.py DELETED
@@ -1,2 +0,0 @@
1
- _base_ = './danet_r50-d8_512x1024_40k_cityscapes.py'
2
- model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
 
 
 
spaces/Andy1621/uniformer_image_segmentation/configs/encnet/encnet_r50-d8_512x512_160k_ade20k.py DELETED
@@ -1,6 +0,0 @@
1
- _base_ = [
2
- '../_base_/models/encnet_r50-d8.py', '../_base_/datasets/ade20k.py',
3
- '../_base_/default_runtime.py', '../_base_/schedules/schedule_160k.py'
4
- ]
5
- model = dict(
6
- decode_head=dict(num_classes=150), auxiliary_head=dict(num_classes=150))
 
 
 
 
 
 
 
spaces/Andyrasika/Andyrasika-dreamshaper-sdxl-1.0/app.py DELETED
@@ -1,3 +0,0 @@
1
- import gradio as gr
2
-
3
- gr.Interface.load("models/Andyrasika/dreamshaper-sdxl-1.0").launch()
 
 
 
 
spaces/AnimalEquality/chatbot/_proc/_docs/site_libs/quarto-search/autocomplete.umd.js DELETED
@@ -1,3 +0,0 @@
1
- /*! @algolia/autocomplete-js 1.7.3 | MIT License | © Algolia, Inc. and contributors | https://github.com/algolia/autocomplete */
2
- !function(e,t){"object"==typeof exports&&"undefined"!=typeof module?t(exports):"function"==typeof define&&define.amd?define(["exports"],t):t((e="undefined"!=typeof globalThis?globalThis:e||self)["@algolia/autocomplete-js"]={})}(this,(function(e){"use strict";function t(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function n(e){for(var n=1;n<arguments.length;n++){var r=null!=arguments[n]?arguments[n]:{};n%2?t(Object(r),!0).forEach((function(t){o(e,t,r[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(r)):t(Object(r)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(r,t))}))}return e}function r(e){return r="function"==typeof Symbol&&"symbol"==typeof Symbol.iterator?function(e){return typeof e}:function(e){return e&&"function"==typeof Symbol&&e.constructor===Symbol&&e!==Symbol.prototype?"symbol":typeof e},r(e)}function o(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function i(){return i=Object.assign||function(e){for(var t=1;t<arguments.length;t++){var n=arguments[t];for(var r in n)Object.prototype.hasOwnProperty.call(n,r)&&(e[r]=n[r])}return e},i.apply(this,arguments)}function u(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function a(e,t){return function(e){if(Array.isArray(e))return e}(e)||function(e,t){var n=null==e?null:"undefined"!=typeof Symbol&&e[Symbol.iterator]||e["@@iterator"];if(null==n)return;var 
r,o,i=[],u=!0,a=!1;try{for(n=n.call(e);!(u=(r=n.next()).done)&&(i.push(r.value),!t||i.length!==t);u=!0);}catch(e){a=!0,o=e}finally{try{u||null==n.return||n.return()}finally{if(a)throw o}}return i}(e,t)||l(e,t)||function(){throw new TypeError("Invalid attempt to destructure non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function c(e){return function(e){if(Array.isArray(e))return s(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||l(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function l(e,t){if(e){if("string"==typeof e)return s(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);return"Object"===n&&e.constructor&&(n=e.constructor.name),"Map"===n||"Set"===n?Array.from(e):"Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n)?s(e,t):void 0}}function s(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function p(e){return{current:e}}function f(e,t){var n=void 0;return function(){for(var r=arguments.length,o=new Array(r),i=0;i<r;i++)o[i]=arguments[i];n&&clearTimeout(n),n=setTimeout((function(){return e.apply(void 0,o)}),t)}}function d(e){return e.reduce((function(e,t){return e.concat(t)}),[])}var m=0;function v(){return"autocomplete-".concat(m++)}function h(e,t){return t.reduce((function(e,t){return e&&e[t]}),e)}function g(e){return 0===e.collections.length?0:e.collections.reduce((function(e,t){return e+t.items.length}),0)}var y=function(){},b="1.7.3",O=[{segment:"autocomplete-core",version:b}];function _(e,t){var n=t;return{then:function(t,r){return _(e.then(j(t,n,e),j(r,n,e)),n)},catch:function(t){return _(e.catch(j(t,n,e)),n)},finally:function(t){return t&&n.onCancelList.push(t),_(e.finally(j(t&&function(){return 
n.onCancelList=[],t()},n,e)),n)},cancel:function(){n.isCanceled=!0;var e=n.onCancelList;n.onCancelList=[],e.forEach((function(e){e()}))},isCanceled:function(){return!0===n.isCanceled}}}function P(e){return _(e,{isCanceled:!1,onCancelList:[]})}function j(e,t,n){return e?function(n){return t.isCanceled?n:e(n)}:n}function w(e,t,n,r){if(!n)return null;if(e<0&&(null===t||null!==r&&0===t))return n+e;var o=(null===t?-1:t)+e;return o<=-1||o>=n?null===r?null:0:o}function S(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function I(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function E(e,t){var n=[];return Promise.resolve(e(t)).then((function(e){return Promise.all(e.filter((function(e){return Boolean(e)})).map((function(e){if(e.sourceId,n.includes(e.sourceId))throw new Error("[Autocomplete] The `sourceId` ".concat(JSON.stringify(e.sourceId)," is not unique."));n.push(e.sourceId);var t=function(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?S(Object(n),!0).forEach((function(t){I(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):S(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}({getItemInputValue:function(e){return e.state.query},getItemUrl:function(){},onSelect:function(e){(0,e.setIsOpen)(!1)},onActive:y},e);return Promise.resolve(t)})))}))}function A(e){var t=function(e){var t=e.collections.map((function(e){return e.items.length})).reduce((function(e,t,n){var r=(e[n-1]||0)+t;return e.push(r),e}),[]).reduce((function(t,n){return n<=e.activeItemId?t+1:t}),0);return e.collections[t]}(e);if(!t)return null;var n=t.items[function(e){for(var t=e.state,n=e.collection,r=!1,o=0,i=0;!1===r;){var 
u=t.collections[o];if(u===n){r=!0;break}i+=u.items.length,o++}return t.activeItemId-i}({state:e,collection:t})],r=t.source;return{item:n,itemInputValue:r.getItemInputValue({item:n,state:e}),itemUrl:r.getItemUrl({item:n,state:e}),source:r}}var C=/((gt|sm)-|galaxy nexus)|samsung[- ]/i;function D(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function k(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?D(Object(n),!0).forEach((function(t){x(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):D(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function x(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function N(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function q(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function R(e,t,n){var r,o=t.initialState;return{getState:function(){return o},dispatch:function(r,i){var u=function(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?N(Object(n),!0).forEach((function(t){q(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):N(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}({},o);o=e(o,{type:r,props:t,payload:i}),n({state:o,prevState:u})},pendingRequests:(r=[],{add:function(e){return r.push(e),e.finally((function(){r=r.filter((function(t){return 
t!==e}))}))},cancelAll:function(){r.forEach((function(e){return e.cancel()}))},isEmpty:function(){return 0===r.length}})}}function T(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function L(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?T(Object(n),!0).forEach((function(t){B(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):T(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function B(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function F(e){return function(e){if(Array.isArray(e))return M(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return M(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return M(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function M(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function U(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function H(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?U(Object(n),!0).forEach((function(t){V(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):U(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function V(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function W(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Q(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?W(Object(n),!0).forEach((function(t){$(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):W(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function $(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function z(e){return function(e){if(Array.isArray(e))return G(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return G(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return G(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function G(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function K(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var 
r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function J(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?K(Object(n),!0).forEach((function(t){Y(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):K(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function Y(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function X(e){return Boolean(e.execute)}function Z(e,t){return n=e,Boolean(null==n?void 0:n.execute)?J(J({},e),{},{requests:e.queries.map((function(n){return{query:n,sourceId:t,transformResponse:e.transformResponse}}))}):{items:e,sourceId:t};var n}function ee(e){var t=e.reduce((function(e,t){if(!X(t))return e.push(t),e;var n=t.searchClient,r=t.execute,o=t.requesterId,i=t.requests,u=e.find((function(e){return X(t)&&X(e)&&e.searchClient===n&&Boolean(o)&&e.requesterId===o}));if(u){var a;(a=u.items).push.apply(a,z(i))}else{var c={execute:r,requesterId:o,items:i,searchClient:n};e.push(c)}return e}),[]).map((function(e){if(!X(e))return Promise.resolve(e);var t=e,n=t.execute,r=t.items;return n({searchClient:t.searchClient,requests:r})}));return Promise.all(t).then((function(e){return d(e)}))}function te(e,t){return t.map((function(t){var n=e.filter((function(e){return e.sourceId===t.sourceId})),r=n.map((function(e){return e.items})),o=n[0].transformResponse,i=o?o(function(e){var t=e.map((function(e){var t;return k(k({},e),{},{hits:null===(t=e.hits)||void 0===t?void 0:t.map((function(t){return k(k({},t),{},{__autocomplete_indexName:e.index,__autocomplete_queryID:e.queryID})}))})}));return{results:t,hits:t.map((function(e){return e.hits})).filter(Boolean),facetHits:t.map((function(e){var t;return null===(t=e.facetHits)||void 0===t?void 
0:t.map((function(e){return{label:e.value,count:e.count,_highlightResult:{label:{value:e.highlighted}}}}))})).filter(Boolean)}}(r)):r;return i.every(Boolean),'The `getItems` function from source "'.concat(t.sourceId,'" must return an array of items but returned ').concat(JSON.stringify(void 0),".\n\nDid you forget to return items?\n\nSee: https://www.algolia.com/doc/ui-libraries/autocomplete/core-concepts/sources/#param-getitems"),{source:t,items:i}}))}var ne=["event","nextState","props","query","refresh","store"];function re(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function oe(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?re(Object(n),!0).forEach((function(t){ie(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):re(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function ie(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function ue(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var ae,ce,le,se=null,pe=(ae=-1,ce=-1,le=void 0,function(e){var t=++ae;return Promise.resolve(e).then((function(e){return le&&t<ce?le:(ce=t,le=e,e)}))});function fe(e){var t=e.event,n=e.nextState,r=void 0===n?{}:n,o=e.props,i=e.query,u=e.refresh,a=e.store,c=ue(e,ne);se&&o.environment.clearTimeout(se);var 
l=c.setCollections,s=c.setIsOpen,p=c.setQuery,f=c.setActiveItemId,m=c.setStatus;if(p(i),f(o.defaultActiveItemId),!i&&!1===o.openOnFocus){var v,h=a.getState().collections.map((function(e){return oe(oe({},e),{},{items:[]})}));m("idle"),l(h),s(null!==(v=r.isOpen)&&void 0!==v?v:o.shouldPanelOpen({state:a.getState()}));var g=P(pe(h).then((function(){return Promise.resolve()})));return a.pendingRequests.add(g)}m("loading"),se=o.environment.setTimeout((function(){m("stalled")}),o.stallThreshold);var y=P(pe(o.getSources(oe({query:i,refresh:u,state:a.getState()},c)).then((function(e){return Promise.all(e.map((function(e){return Promise.resolve(e.getItems(oe({query:i,refresh:u,state:a.getState()},c))).then((function(t){return Z(t,e.sourceId)}))}))).then(ee).then((function(t){return te(t,e)})).then((function(e){return function(e){var t=e.collections,n=e.props,r=e.state,o=t.reduce((function(e,t){return Q(Q({},e),{},$({},t.source.sourceId,Q(Q({},t.source),{},{getItems:function(){return d(t.items)}})))}),{});return d(n.reshape({sources:Object.values(o),sourcesBySourceId:o,state:r})).filter(Boolean).map((function(e){return{source:e,items:e.getItems()}}))}({collections:e,props:o,state:a.getState()})}))})))).then((function(e){var n;m("idle"),l(e);var p=o.shouldPanelOpen({state:a.getState()});s(null!==(n=r.isOpen)&&void 0!==n?n:o.openOnFocus&&!i&&p||p);var f=A(a.getState());if(null!==a.getState().activeItemId&&f){var d=f.item,v=f.itemInputValue,h=f.itemUrl,g=f.source;g.onActive(oe({event:t,item:d,itemInputValue:v,itemUrl:h,refresh:u,source:g,state:a.getState()},c))}})).finally((function(){m("idle"),se&&o.environment.clearTimeout(se)}));return a.pendingRequests.add(y)}var de=["event","props","refresh","store"];function me(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function ve(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?me(Object(n),!0).forEach((function(t){he(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):me(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function he(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function ge(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}var ye=["props","refresh","store"],be=["inputElement","formElement","panelElement"],Oe=["inputElement"],_e=["inputElement","maxLength"],Pe=["item","source"];function je(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function we(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?je(Object(n),!0).forEach((function(t){Se(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):je(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function Se(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Ie(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var 
i=Object.getOwnPropertySymbols(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function Ee(e){var t=e.props,n=e.refresh,r=e.store,o=Ie(e,ye);return{getEnvironmentProps:function(e){var n=e.inputElement,o=e.formElement,i=e.panelElement;function u(e){!r.getState().isOpen&&r.pendingRequests.isEmpty()||e.target===n||!1===[o,i].some((function(t){return n=t,r=e.target,n===r||n.contains(r);var n,r}))&&(r.dispatch("blur",null),t.debug||r.pendingRequests.cancelAll())}return we({onTouchStart:u,onMouseDown:u,onTouchMove:function(e){!1!==r.getState().isOpen&&n===t.environment.document.activeElement&&e.target!==n&&n.blur()}},Ie(e,be))},getRootProps:function(e){return we({role:"combobox","aria-expanded":r.getState().isOpen,"aria-haspopup":"listbox","aria-owns":r.getState().isOpen?"".concat(t.id,"-list"):void 0,"aria-labelledby":"".concat(t.id,"-label")},e)},getFormProps:function(e){return e.inputElement,we({action:"",noValidate:!0,role:"search",onSubmit:function(i){var u;i.preventDefault(),t.onSubmit(we({event:i,refresh:n,state:r.getState()},o)),r.dispatch("submit",null),null===(u=e.inputElement)||void 0===u||u.blur()},onReset:function(i){var u;i.preventDefault(),t.onReset(we({event:i,refresh:n,state:r.getState()},o)),r.dispatch("reset",null),null===(u=e.inputElement)||void 0===u||u.focus()}},Ie(e,Oe))},getLabelProps:function(e){return we({htmlFor:"".concat(t.id,"-input"),id:"".concat(t.id,"-label")},e)},getInputProps:function(e){var i;function u(e){(t.openOnFocus||Boolean(r.getState().query))&&fe(we({event:e,props:t,query:r.getState().completion||r.getState().query,refresh:n,store:r},o)),r.dispatch("focus",null)}var a=e||{};a.inputElement;var c=a.maxLength,l=void 0===c?512:c,s=Ie(a,_e),p=A(r.getState()),f=function(e){return Boolean(e&&e.match(C))}((null===(i=t.environment.navigator)||void 0===i?void 0:i.userAgent)||""),d=null!=p&&p.itemUrl&&!f?"go":"search";return 
we({"aria-autocomplete":"both","aria-activedescendant":r.getState().isOpen&&null!==r.getState().activeItemId?"".concat(t.id,"-item-").concat(r.getState().activeItemId):void 0,"aria-controls":r.getState().isOpen?"".concat(t.id,"-list"):void 0,"aria-labelledby":"".concat(t.id,"-label"),value:r.getState().completion||r.getState().query,id:"".concat(t.id,"-input"),autoComplete:"off",autoCorrect:"off",autoCapitalize:"off",enterKeyHint:d,spellCheck:"false",autoFocus:t.autoFocus,placeholder:t.placeholder,maxLength:l,type:"search",onChange:function(e){fe(we({event:e,props:t,query:e.currentTarget.value.slice(0,l),refresh:n,store:r},o))},onKeyDown:function(e){!function(e){var t=e.event,n=e.props,r=e.refresh,o=e.store,i=ge(e,de);if("ArrowUp"===t.key||"ArrowDown"===t.key){var u=function(){var e=n.environment.document.getElementById("".concat(n.id,"-item-").concat(o.getState().activeItemId));e&&(e.scrollIntoViewIfNeeded?e.scrollIntoViewIfNeeded(!1):e.scrollIntoView(!1))},a=function(){var e=A(o.getState());if(null!==o.getState().activeItemId&&e){var n=e.item,u=e.itemInputValue,a=e.itemUrl,c=e.source;c.onActive(ve({event:t,item:n,itemInputValue:u,itemUrl:a,refresh:r,source:c,state:o.getState()},i))}};t.preventDefault(),!1===o.getState().isOpen&&(n.openOnFocus||Boolean(o.getState().query))?fe(ve({event:t,props:n,query:o.getState().query,refresh:r,store:o},i)).then((function(){o.dispatch(t.key,{nextActiveItemId:n.defaultActiveItemId}),a(),setTimeout(u,0)})):(o.dispatch(t.key,{}),a(),u())}else if("Escape"===t.key)t.preventDefault(),o.dispatch(t.key,null),o.pendingRequests.cancelAll();else if("Tab"===t.key)o.dispatch("blur",null),o.pendingRequests.cancelAll();else if("Enter"===t.key){if(null===o.getState().activeItemId||o.getState().collections.every((function(e){return 0===e.items.length})))return void(n.debug||o.pendingRequests.cancelAll());t.preventDefault();var c=A(o.getState()),l=c.item,s=c.itemInputValue,p=c.itemUrl,f=c.source;if(t.metaKey||t.ctrlKey)void 
0!==p&&(f.onSelect(ve({event:t,item:l,itemInputValue:s,itemUrl:p,refresh:r,source:f,state:o.getState()},i)),n.navigator.navigateNewTab({itemUrl:p,item:l,state:o.getState()}));else if(t.shiftKey)void 0!==p&&(f.onSelect(ve({event:t,item:l,itemInputValue:s,itemUrl:p,refresh:r,source:f,state:o.getState()},i)),n.navigator.navigateNewWindow({itemUrl:p,item:l,state:o.getState()}));else if(t.altKey);else{if(void 0!==p)return f.onSelect(ve({event:t,item:l,itemInputValue:s,itemUrl:p,refresh:r,source:f,state:o.getState()},i)),void n.navigator.navigate({itemUrl:p,item:l,state:o.getState()});fe(ve({event:t,nextState:{isOpen:!1},props:n,query:s,refresh:r,store:o},i)).then((function(){f.onSelect(ve({event:t,item:l,itemInputValue:s,itemUrl:p,refresh:r,source:f,state:o.getState()},i))}))}}}(we({event:e,props:t,refresh:n,store:r},o))},onFocus:u,onBlur:y,onClick:function(n){e.inputElement!==t.environment.document.activeElement||r.getState().isOpen||u(n)}},s)},getPanelProps:function(e){return we({onMouseDown:function(e){e.preventDefault()},onMouseLeave:function(){r.dispatch("mouseleave",null)}},e)},getListProps:function(e){return we({role:"listbox","aria-labelledby":"".concat(t.id,"-label"),id:"".concat(t.id,"-list")},e)},getItemProps:function(e){var i=e.item,u=e.source,a=Ie(e,Pe);return we({id:"".concat(t.id,"-item-").concat(i.__autocomplete_id),role:"option","aria-selected":r.getState().activeItemId===i.__autocomplete_id,onMouseMove:function(e){if(i.__autocomplete_id!==r.getState().activeItemId){r.dispatch("mousemove",i.__autocomplete_id);var t=A(r.getState());if(null!==r.getState().activeItemId&&t){var u=t.item,a=t.itemInputValue,c=t.itemUrl,l=t.source;l.onActive(we({event:e,item:u,itemInputValue:a,itemUrl:c,refresh:n,source:l,state:r.getState()},o))}}},onMouseDown:function(e){e.preventDefault()},onClick:function(e){var 
a=u.getItemInputValue({item:i,state:r.getState()}),c=u.getItemUrl({item:i,state:r.getState()});(c?Promise.resolve():fe(we({event:e,nextState:{isOpen:!1},props:t,query:a,refresh:n,store:r},o))).then((function(){u.onSelect(we({event:e,item:i,itemInputValue:a,itemUrl:c,refresh:n,source:u,state:r.getState()},o))}))}},a)}}}function Ae(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Ce(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?Ae(Object(n),!0).forEach((function(t){De(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):Ae(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function De(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function ke(e){var t,n,r,o,i=e.plugins,u=e.options,a=null===(t=((null===(n=u.__autocomplete_metadata)||void 0===n?void 0:n.userAgents)||[])[0])||void 0===t?void 0:t.segment,c=a?De({},a,Object.keys((null===(r=u.__autocomplete_metadata)||void 0===r?void 0:r.options)||{})):{};return{plugins:i.map((function(e){return{name:e.name,options:Object.keys(e.__autocomplete_pluginOptions||[])}})),options:Ce({"autocomplete-core":Object.keys(u)},c),ua:O.concat((null===(o=u.__autocomplete_metadata)||void 0===o?void 0:o.userAgents)||[])}}function xe(e){var t,n=e.state;return!1===n.isOpen||null===n.activeItemId?null:(null===(t=A(n))||void 0===t?void 0:t.itemInputValue)||null}function Ne(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function qe(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?Ne(Object(n),!0).forEach((function(t){Re(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):Ne(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function Re(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}var Te=function(e,t){switch(t.type){case"setActiveItemId":case"mousemove":return qe(qe({},e),{},{activeItemId:t.payload});case"setQuery":return qe(qe({},e),{},{query:t.payload,completion:null});case"setCollections":return qe(qe({},e),{},{collections:t.payload});case"setIsOpen":return qe(qe({},e),{},{isOpen:t.payload});case"setStatus":return qe(qe({},e),{},{status:t.payload});case"setContext":return qe(qe({},e),{},{context:qe(qe({},e.context),t.payload)});case"ArrowDown":var n=qe(qe({},e),{},{activeItemId:t.payload.hasOwnProperty("nextActiveItemId")?t.payload.nextActiveItemId:w(1,e.activeItemId,g(e),t.props.defaultActiveItemId)});return qe(qe({},n),{},{completion:xe({state:n})});case"ArrowUp":var r=qe(qe({},e),{},{activeItemId:w(-1,e.activeItemId,g(e),t.props.defaultActiveItemId)});return qe(qe({},r),{},{completion:xe({state:r})});case"Escape":return e.isOpen?qe(qe({},e),{},{activeItemId:null,isOpen:!1,completion:null}):qe(qe({},e),{},{activeItemId:null,query:"",status:"idle",collections:[]});case"submit":return qe(qe({},e),{},{activeItemId:null,isOpen:!1,status:"idle"});case"reset":return qe(qe({},e),{},{activeItemId:!0===t.props.openOnFocus?t.props.defaultActiveItemId:null,status:"idle",query:""});case"focus":return qe(qe({},e),{},{activeItemId:t.props.defaultActiveItemId,isOpen:(t.props.openOnFocus||Boolean(e.query))&&t.props.shouldPanelOpen({state:e})});case"blur":return t.props.debug?e:qe(qe({},e),{},{isOpen:!1,activeItemId:null});case"mouseleave":return qe(qe({},e),{},{activeItemId:t.props.defaultActiveItemId});default:return"The reducer action 
".concat(JSON.stringify(t.type)," is not supported."),e}};function Le(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Be(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?Le(Object(n),!0).forEach((function(t){Fe(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):Le(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function Fe(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function Me(e){var t=[],n=function(e,t){var n,r="undefined"!=typeof window?window:{},o=e.plugins||[];return H(H({debug:!1,openOnFocus:!1,placeholder:"",autoFocus:!1,defaultActiveItemId:null,stallThreshold:300,environment:r,shouldPanelOpen:function(e){return g(e.state)>0},reshape:function(e){return e.sources}},e),{},{id:null!==(n=e.id)&&void 0!==n?n:v(),plugins:o,initialState:H({activeItemId:null,query:"",completion:null,collections:[],isOpen:!1,status:"idle",context:{}},e.initialState),onStateChange:function(t){var n;null===(n=e.onStateChange)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onStateChange)||void 0===n?void 0:n.call(e,t)}))},onSubmit:function(t){var n;null===(n=e.onSubmit)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onSubmit)||void 0===n?void 0:n.call(e,t)}))},onReset:function(t){var n;null===(n=e.onReset)||void 0===n||n.call(e,t),o.forEach((function(e){var n;return null===(n=e.onReset)||void 0===n?void 0:n.call(e,t)}))},getSources:function(n){return Promise.all([].concat(F(o.map((function(e){return e.getSources}))),[e.getSources]).filter(Boolean).map((function(e){return E(e,n)}))).then((function(e){return d(e)})).then((function(e){return 
e.map((function(e){return H(H({},e),{},{onSelect:function(n){e.onSelect(n),t.forEach((function(e){var t;return null===(t=e.onSelect)||void 0===t?void 0:t.call(e,n)}))},onActive:function(n){e.onActive(n),t.forEach((function(e){var t;return null===(t=e.onActive)||void 0===t?void 0:t.call(e,n)}))}})}))}))},navigator:H({navigate:function(e){var t=e.itemUrl;r.location.assign(t)},navigateNewTab:function(e){var t=e.itemUrl,n=r.open(t,"_blank","noopener");null==n||n.focus()},navigateNewWindow:function(e){var t=e.itemUrl;r.open(t,"_blank","noopener")}},e.navigator)})}(e,t),r=R(Te,n,(function(e){var t=e.prevState,r=e.state;n.onStateChange(Be({prevState:t,state:r,refresh:u},o))})),o=function(e){var t=e.store;return{setActiveItemId:function(e){t.dispatch("setActiveItemId",e)},setQuery:function(e){t.dispatch("setQuery",e)},setCollections:function(e){var n=0,r=e.map((function(e){return L(L({},e),{},{items:d(e.items).map((function(e){return L(L({},e),{},{__autocomplete_id:n++})}))})}));t.dispatch("setCollections",r)},setIsOpen:function(e){t.dispatch("setIsOpen",e)},setStatus:function(e){t.dispatch("setStatus",e)},setContext:function(e){t.dispatch("setContext",e)}}}({store:r}),i=Ee(Be({props:n,refresh:u,store:r},o));function u(){return fe(Be({event:new Event("input"),nextState:{isOpen:r.getState().isOpen},props:n,query:r.getState().query,refresh:u,store:r},o))}return n.plugins.forEach((function(e){var n;return null===(n=e.subscribe)||void 0===n?void 0:n.call(e,Be(Be({},o),{},{refresh:u,onSelect:function(e){t.push({onSelect:e})},onActive:function(e){t.push({onActive:e})}}))})),function(e){var t,n,r=e.metadata,o=e.environment;if(null===(t=o.navigator)||void 0===t||null===(n=t.userAgent)||void 0===n?void 0:n.includes("Algolia Crawler")){var 
i=o.document.createElement("meta"),u=o.document.querySelector("head");i.name="algolia:metadata",setTimeout((function(){i.content=JSON.stringify(r),u.appendChild(i)}),0)}}({metadata:ke({plugins:n.plugins,options:e}),environment:n.environment}),Be(Be({refresh:u},i),o)}var Ue=function(e,t,n,r){var o;t[0]=0;for(var i=1;i<t.length;i++){var u=t[i++],a=t[i]?(t[0]|=u?1:2,n[t[i++]]):t[++i];3===u?r[0]=a:4===u?r[1]=Object.assign(r[1]||{},a):5===u?(r[1]=r[1]||{})[t[++i]]=a:6===u?r[1][t[++i]]+=a+"":u?(o=e.apply(a,Ue(e,a,n,["",null])),r.push(o),a[0]?t[0]|=2:(t[i-2]=0,t[i]=o)):r.push(a)}return r},He=new Map;function Ve(e){var t=He.get(this);return t||(t=new Map,He.set(this,t)),(t=Ue(this,t.get(e)||(t.set(e,t=function(e){for(var t,n,r=1,o="",i="",u=[0],a=function(e){1===r&&(e||(o=o.replace(/^\s*\n\s*|\s*\n\s*$/g,"")))?u.push(0,e,o):3===r&&(e||o)?(u.push(3,e,o),r=2):2===r&&"..."===o&&e?u.push(4,e,0):2===r&&o&&!e?u.push(5,0,!0,o):r>=5&&((o||!e&&5===r)&&(u.push(r,0,o,n),r=6),e&&(u.push(r,e,0,n),r=6)),o=""},c=0;c<e.length;c++){c&&(1===r&&a(),a(c));for(var l=0;l<e[c].length;l++)t=e[c][l],1===r?"<"===t?(a(),u=[u],r=3):o+=t:4===r?"--"===o&&">"===t?(r=1,o=""):o=t+o[0]:i?t===i?i="":o+=t:'"'===t||"'"===t?i=t:">"===t?(a(),r=1):r&&("="===t?(r=5,n=o,o=""):"/"===t&&(r<5||">"===e[c][l+1])?(a(),3===r&&(u=u[0]),r=u,(u=u[0]).push(2,0,r),r=0):" "===t||"\t"===t||"\n"===t||"\r"===t?(a(),r=2):o+=t),3===r&&"!--"===o&&(r=4,u=u[0])}return a(),u}(e)),t),arguments,[])).length>1?t:t[0]}var We=function(e){var t=e.environment,n=t.document.createElementNS("http://www.w3.org/2000/svg","svg");n.setAttribute("class","aa-ClearIcon"),n.setAttribute("viewBox","0 0 24 24"),n.setAttribute("width","18"),n.setAttribute("height","18"),n.setAttribute("fill","currentColor");var r=t.document.createElementNS("http://www.w3.org/2000/svg","path");return r.setAttribute("d","M5.293 6.707l5.293 5.293-5.293 5.293c-0.391 0.391-0.391 1.024 0 1.414s1.024 0.391 1.414 0l5.293-5.293 5.293 5.293c0.391 0.391 1.024 0.391 1.414 0s0.391-1.024 
0-1.414l-5.293-5.293 5.293-5.293c0.391-0.391 0.391-1.024 0-1.414s-1.024-0.391-1.414 0l-5.293 5.293-5.293-5.293c-0.391-0.391-1.024-0.391-1.414 0s-0.391 1.024 0 1.414z"),n.appendChild(r),n};function Qe(e,t){if("string"==typeof t){var n=e.document.querySelector(t);return"The element ".concat(JSON.stringify(t)," is not in the document."),n}return t}function $e(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return t.reduce((function(e,t){return Object.keys(t).forEach((function(n){var r=e[n],o=t[n];r!==o&&(e[n]=[r,o].filter(Boolean).join(" "))})),e}),{})}var ze=function(e){return e&&"object"===r(e)&&"[object Object]"===Object.prototype.toString.call(e)};function Ge(){for(var e=arguments.length,t=new Array(e),n=0;n<e;n++)t[n]=arguments[n];return t.reduce((function(e,t){return Object.keys(t).forEach((function(n){var r=e[n],o=t[n];Array.isArray(r)&&Array.isArray(o)?e[n]=r.concat.apply(r,c(o)):ze(r)&&ze(o)?e[n]=Ge(r,o):e[n]=o})),e}),{})}function Ke(e,t){return Object.entries(e).reduce((function(e,r){var i=a(r,2),u=i[0],c=i[1];return t({key:u,value:c})?n(n({},e),{},o({},u,c)):e}),{})}var Je=["ontouchstart","ontouchend","ontouchmove","ontouchcancel"];function Ye(e,t,n){e[t]=null===n?"":"number"!=typeof n?n:n+"px"}function Xe(e){this._listeners[e.type](e)}function Ze(e,t,n){var r,o,i=e[t];if("style"===t)if("string"==typeof n)e.style=n;else if(null===n)e.style="";else for(t in n)i&&n[t]===i[t]||Ye(e.style,t,n[t]);else"o"===t[0]&&"n"===t[1]?(r=t!==(t=t.replace(/Capture$/,"")),((o=t.toLowerCase())in e||Je.includes(o))&&(t=o),t=t.slice(2),e._listeners||(e._listeners={}),e._listeners[t]=n,n?i||e.addEventListener(t,Xe,r):e.removeEventListener(t,Xe,r)):"list"!==t&&"tagName"!==t&&"form"!==t&&"type"!==t&&"size"!==t&&"download"!==t&&"href"!==t&&t in e?e[t]=null==n?"":n:"function"!=typeof n&&"dangerouslySetInnerHTML"!==t&&(null==n||!1===n&&!/^ar/.test(t)?e.removeAttribute(t):e.setAttribute(t,n))}function et(e){return"onChange"===e?"onInput":e}function 
tt(e,t){for(var n in t)Ze(e,et(n),t[n])}function nt(e,t){for(var n in t)"o"===n[0]&&"n"===n[1]||Ze(e,et(n),t[n])}var rt=["children"];function ot(e){return function(t,n){var r=n.children,o=void 0===r?[]:r,i=u(n,rt),a=e.document.createElement(t);return tt(a,i),a.append.apply(a,c(o)),a}}var it=["autocompleteScopeApi","environment","classNames","getInputProps","getInputPropsCore","isDetached","state"],ut=function(e){var t=e.environment.document.createElementNS("http://www.w3.org/2000/svg","svg");return t.setAttribute("class","aa-LoadingIcon"),t.setAttribute("viewBox","0 0 100 100"),t.setAttribute("width","20"),t.setAttribute("height","20"),t.innerHTML='<circle\n cx="50"\n cy="50"\n fill="none"\n r="35"\n stroke="currentColor"\n stroke-dasharray="164.93361431346415 56.97787143782138"\n stroke-width="6"\n>\n <animateTransform\n attributeName="transform"\n type="rotate"\n repeatCount="indefinite"\n dur="1s"\n values="0 50 50;90 50 50;180 50 50;360 50 50"\n keyTimes="0;0.40;0.65;1"\n />\n</circle>',t},at=function(e){var t=e.environment,n=t.document.createElementNS("http://www.w3.org/2000/svg","svg");n.setAttribute("class","aa-SubmitIcon"),n.setAttribute("viewBox","0 0 24 24"),n.setAttribute("width","20"),n.setAttribute("height","20"),n.setAttribute("fill","currentColor");var r=t.document.createElementNS("http://www.w3.org/2000/svg","path");return r.setAttribute("d","M16.041 15.856c-0.034 0.026-0.067 0.055-0.099 0.087s-0.060 0.064-0.087 0.099c-1.258 1.213-2.969 1.958-4.855 1.958-1.933 0-3.682-0.782-4.95-2.050s-2.050-3.017-2.050-4.95 0.782-3.682 2.050-4.95 3.017-2.050 4.95-2.050 3.682 0.782 4.95 2.050 2.050 3.017 2.050 4.95c0 1.886-0.745 3.597-1.959 4.856zM21.707 20.293l-3.675-3.675c1.231-1.54 1.968-3.493 1.968-5.618 0-2.485-1.008-4.736-2.636-6.364s-3.879-2.636-6.364-2.636-4.736 1.008-6.364 2.636-2.636 3.879-2.636 6.364 1.008 4.736 2.636 6.364 3.879 2.636 6.364 2.636c2.125 0 4.078-0.737 5.618-1.968l3.675 3.675c0.391 0.391 1.024 0.391 1.414 0s0.391-1.024 
0-1.414z"),n.appendChild(r),n};function ct(e){var t=e.autocomplete,r=e.autocompleteScopeApi,o=e.classNames,i=e.environment,a=e.isDetached,c=e.placeholder,l=void 0===c?"Search":c,s=e.propGetters,p=e.setIsModalOpen,f=e.state,d=e.translations,m=ot(i),v=s.getRootProps(n({state:f,props:t.getRootProps({})},r)),h=m("div",n({class:o.root},v)),g=m("div",{class:o.detachedContainer,onMouseDown:function(e){e.stopPropagation()}}),y=m("div",{class:o.detachedOverlay,children:[g],onMouseDown:function(){p(!1),t.setIsOpen(!1)}}),b=s.getLabelProps(n({state:f,props:t.getLabelProps({})},r)),O=m("button",{class:o.submitButton,type:"submit",title:d.submitButtonTitle,children:[at({environment:i})]}),_=m("label",n({class:o.label,children:[O]},b)),P=m("button",{class:o.clearButton,type:"reset",title:d.clearButtonTitle,children:[We({environment:i})]}),j=m("div",{class:o.loadingIndicator,children:[ut({environment:i})]}),w=function(e){var t=e.autocompleteScopeApi,r=e.environment;e.classNames;var o=e.getInputProps,i=e.getInputPropsCore,a=e.isDetached,c=e.state,l=u(e,it),s=ot(r)("input",l),p=o(n({state:c,props:i({inputElement:s}),inputElement:s},t));return tt(s,n(n({},p),{},{onKeyDown:function(e){a&&"Tab"===e.key||p.onKeyDown(e)}})),s}({class:o.input,environment:i,state:f,getInputProps:s.getInputProps,getInputPropsCore:t.getInputProps,autocompleteScopeApi:r,isDetached:a}),S=m("div",{class:o.inputWrapperPrefix,children:[_,j]}),I=m("div",{class:o.inputWrapperSuffix,children:[P]}),E=m("div",{class:o.inputWrapper,children:[w]}),A=s.getFormProps(n({state:f,props:t.getFormProps({inputElement:w})},r)),C=m("form",n({class:o.form,children:[S,E,I]},A)),D=s.getPanelProps(n({state:f,props:t.getPanelProps({})},r)),k=m("div",n({class:o.panel},D));if(a){var 
x=m("div",{class:o.detachedSearchButtonIcon,children:[at({environment:i})]}),N=m("div",{class:o.detachedSearchButtonPlaceholder,textContent:l}),q=m("button",{type:"button",class:o.detachedSearchButton,onClick:function(){p(!0)},children:[x,N]}),R=m("button",{type:"button",class:o.detachedCancelButton,textContent:d.detachedCancelButtonText,onTouchStart:function(e){e.stopPropagation()},onClick:function(){t.setIsOpen(!1),p(!1)}}),T=m("div",{class:o.detachedFormContainer,children:[C,R]});g.appendChild(T),h.appendChild(q)}else h.appendChild(C);return{detachedContainer:g,detachedOverlay:y,inputWrapper:E,input:w,root:h,form:C,label:_,submitButton:O,clearButton:P,loadingIndicator:j,panel:k}}var lt,st,pt,ft,dt,mt,vt={},ht=[],gt=/acit|ex(?:s|g|n|p|$)|rph|grid|ows|mnc|ntw|ine[ch]|zoo|^ord|itera/i;function yt(e,t){for(var n in t)e[n]=t[n];return e}function bt(e){var t=e.parentNode;t&&t.removeChild(e)}function Ot(e,t,n){var r,o,i,u={};for(i in t)"key"==i?r=t[i]:"ref"==i?o=t[i]:u[i]=t[i];if(arguments.length>2&&(u.children=arguments.length>3?lt.call(arguments,2):n),"function"==typeof e&&null!=e.defaultProps)for(i in e.defaultProps)void 0===u[i]&&(u[i]=e.defaultProps[i]);return _t(e,u,r,o,null)}function _t(e,t,n,r,o){var i={type:e,props:t,key:n,ref:r,__k:null,__:null,__b:0,__e:null,__d:void 0,__c:null,__h:null,constructor:void 0,__v:null==o?++pt:o};return null==o&&null!=st.vnode&&st.vnode(i),i}function Pt(e){return e.children}function jt(e,t){this.props=e,this.context=t}function wt(e,t){if(null==t)return e.__?wt(e.__,e.__.__k.indexOf(e)+1):null;for(var n;t<e.__k.length;t++)if(null!=(n=e.__k[t])&&null!=n.__e)return n.__e;return"function"==typeof e.type?wt(e):null}function St(e){var t,n;if(null!=(e=e.__)&&null!=e.__c){for(e.__e=e.__c.base=null,t=0;t<e.__k.length;t++)if(null!=(n=e.__k[t])&&null!=n.__e){e.__e=e.__c.base=n.__e;break}return St(e)}}function It(e){(!e.__d&&(e.__d=!0)&&ft.push(e)&&!Et.__r++||mt!==st.debounceRendering)&&((mt=st.debounceRendering)||dt)(Et)}function 
Et(){for(var e;Et.__r=ft.length;)e=ft.sort((function(e,t){return e.__v.__b-t.__v.__b})),ft=[],e.some((function(e){var t,n,r,o,i,u;e.__d&&(i=(o=(t=e).__v).__e,(u=t.__P)&&(n=[],(r=yt({},o)).__v=o.__v+1,Rt(u,o,r,t.__n,void 0!==u.ownerSVGElement,null!=o.__h?[i]:null,n,null==i?wt(o):i,o.__h),Tt(n,o),o.__e!=i&&St(o)))}))}function At(e,t,n,r,o,i,u,a,c,l){var s,p,f,d,m,v,h,g=r&&r.__k||ht,y=g.length;for(n.__k=[],s=0;s<t.length;s++)if(null!=(d=n.__k[s]=null==(d=t[s])||"boolean"==typeof d?null:"string"==typeof d||"number"==typeof d||"bigint"==typeof d?_t(null,d,null,null,d):Array.isArray(d)?_t(Pt,{children:d},null,null,null):d.__b>0?_t(d.type,d.props,d.key,null,d.__v):d)){if(d.__=n,d.__b=n.__b+1,null===(f=g[s])||f&&d.key==f.key&&d.type===f.type)g[s]=void 0;else for(p=0;p<y;p++){if((f=g[p])&&d.key==f.key&&d.type===f.type){g[p]=void 0;break}f=null}Rt(e,d,f=f||vt,o,i,u,a,c,l),m=d.__e,(p=d.ref)&&f.ref!=p&&(h||(h=[]),f.ref&&h.push(f.ref,null,d),h.push(p,d.__c||m,d)),null!=m?(null==v&&(v=m),"function"==typeof d.type&&d.__k===f.__k?d.__d=c=Ct(d,c,e):c=Dt(e,d,f,g,m,c),"function"==typeof n.type&&(n.__d=c)):c&&f.__e==c&&c.parentNode!=e&&(c=wt(f))}for(n.__e=v,s=y;s--;)null!=g[s]&&("function"==typeof n.type&&null!=g[s].__e&&g[s].__e==n.__d&&(n.__d=wt(r,s+1)),Ft(g[s],g[s]));if(h)for(s=0;s<h.length;s++)Bt(h[s],h[++s],h[++s])}function Ct(e,t,n){for(var r,o=e.__k,i=0;o&&i<o.length;i++)(r=o[i])&&(r.__=e,t="function"==typeof r.type?Ct(r,t,n):Dt(n,r,r,o,r.__e,t));return t}function Dt(e,t,n,r,o,i){var u,a,c;if(void 0!==t.__d)u=t.__d,t.__d=void 0;else if(null==n||o!=i||null==o.parentNode)e:if(null==i||i.parentNode!==e)e.appendChild(o),u=null;else{for(a=i,c=0;(a=a.nextSibling)&&c<r.length;c+=2)if(a==o)break e;e.insertBefore(o,i),u=i}return void 0!==u?u:o.nextSibling}function kt(e,t,n){"-"===t[0]?e.setProperty(t,n):e[t]=null==n?"":"number"!=typeof n||gt.test(t)?n:n+"px"}function xt(e,t,n,r,o){var i;e:if("style"===t)if("string"==typeof n)e.style.cssText=n;else{if("string"==typeof 
r&&(e.style.cssText=r=""),r)for(t in r)n&&t in n||kt(e.style,t,"");if(n)for(t in n)r&&n[t]===r[t]||kt(e.style,t,n[t])}else if("o"===t[0]&&"n"===t[1])i=t!==(t=t.replace(/Capture$/,"")),t=t.toLowerCase()in e?t.toLowerCase().slice(2):t.slice(2),e.l||(e.l={}),e.l[t+i]=n,n?r||e.addEventListener(t,i?qt:Nt,i):e.removeEventListener(t,i?qt:Nt,i);else if("dangerouslySetInnerHTML"!==t){if(o)t=t.replace(/xlink[H:h]/,"h").replace(/sName$/,"s");else if("href"!==t&&"list"!==t&&"form"!==t&&"tabIndex"!==t&&"download"!==t&&t in e)try{e[t]=null==n?"":n;break e}catch(e){}"function"==typeof n||(null!=n&&(!1!==n||"a"===t[0]&&"r"===t[1])?e.setAttribute(t,n):e.removeAttribute(t))}}function Nt(e){this.l[e.type+!1](st.event?st.event(e):e)}function qt(e){this.l[e.type+!0](st.event?st.event(e):e)}function Rt(e,t,n,r,o,i,u,a,c){var l,s,p,f,d,m,v,h,g,y,b,O=t.type;if(void 0!==t.constructor)return null;null!=n.__h&&(c=n.__h,a=t.__e=n.__e,t.__h=null,i=[a]),(l=st.__b)&&l(t);try{e:if("function"==typeof O){if(h=t.props,g=(l=O.contextType)&&r[l.__c],y=l?g?g.props.value:l.__:r,n.__c?v=(s=t.__c=n.__c).__=s.__E:("prototype"in O&&O.prototype.render?t.__c=s=new O(h,y):(t.__c=s=new jt(h,y),s.constructor=O,s.render=Mt),g&&g.sub(s),s.props=h,s.state||(s.state={}),s.context=y,s.__n=r,p=s.__d=!0,s.__h=[]),null==s.__s&&(s.__s=s.state),null!=O.getDerivedStateFromProps&&(s.__s==s.state&&(s.__s=yt({},s.__s)),yt(s.__s,O.getDerivedStateFromProps(h,s.__s))),f=s.props,d=s.state,p)null==O.getDerivedStateFromProps&&null!=s.componentWillMount&&s.componentWillMount(),null!=s.componentDidMount&&s.__h.push(s.componentDidMount);else{if(null==O.getDerivedStateFromProps&&h!==f&&null!=s.componentWillReceiveProps&&s.componentWillReceiveProps(h,y),!s.__e&&null!=s.shouldComponentUpdate&&!1===s.shouldComponentUpdate(h,s.__s,y)||t.__v===n.__v){s.props=h,s.state=s.__s,t.__v!==n.__v&&(s.__d=!1),s.__v=t,t.__e=n.__e,t.__k=n.__k,t.__k.forEach((function(e){e&&(e.__=t)})),s.__h.length&&u.push(s);break 
e}null!=s.componentWillUpdate&&s.componentWillUpdate(h,s.__s,y),null!=s.componentDidUpdate&&s.__h.push((function(){s.componentDidUpdate(f,d,m)}))}s.context=y,s.props=h,s.state=s.__s,(l=st.__r)&&l(t),s.__d=!1,s.__v=t,s.__P=e,l=s.render(s.props,s.state,s.context),s.state=s.__s,null!=s.getChildContext&&(r=yt(yt({},r),s.getChildContext())),p||null==s.getSnapshotBeforeUpdate||(m=s.getSnapshotBeforeUpdate(f,d)),b=null!=l&&l.type===Pt&&null==l.key?l.props.children:l,At(e,Array.isArray(b)?b:[b],t,n,r,o,i,u,a,c),s.base=t.__e,t.__h=null,s.__h.length&&u.push(s),v&&(s.__E=s.__=null),s.__e=!1}else null==i&&t.__v===n.__v?(t.__k=n.__k,t.__e=n.__e):t.__e=Lt(n.__e,t,n,r,o,i,u,c);(l=st.diffed)&&l(t)}catch(e){t.__v=null,(c||null!=i)&&(t.__e=a,t.__h=!!c,i[i.indexOf(a)]=null),st.__e(e,t,n)}}function Tt(e,t){st.__c&&st.__c(t,e),e.some((function(t){try{e=t.__h,t.__h=[],e.some((function(e){e.call(t)}))}catch(e){st.__e(e,t.__v)}}))}function Lt(e,t,n,r,o,i,u,a){var c,l,s,p=n.props,f=t.props,d=t.type,m=0;if("svg"===d&&(o=!0),null!=i)for(;m<i.length;m++)if((c=i[m])&&"setAttribute"in c==!!d&&(d?c.localName===d:3===c.nodeType)){e=c,i[m]=null;break}if(null==e){if(null===d)return document.createTextNode(f);e=o?document.createElementNS("http://www.w3.org/2000/svg",d):document.createElement(d,f.is&&f),i=null,a=!1}if(null===d)p===f||a&&e.data===f||(e.data=f);else{if(i=i&&lt.call(e.childNodes),l=(p=n.props||vt).dangerouslySetInnerHTML,s=f.dangerouslySetInnerHTML,!a){if(null!=i)for(p={},m=0;m<e.attributes.length;m++)p[e.attributes[m].name]=e.attributes[m].value;(s||l)&&(s&&(l&&s.__html==l.__html||s.__html===e.innerHTML)||(e.innerHTML=s&&s.__html||""))}if(function(e,t,n,r,o){var i;for(i in n)"children"===i||"key"===i||i in t||xt(e,i,null,n[i],r);for(i in t)o&&"function"!=typeof t[i]||"children"===i||"key"===i||"value"===i||"checked"===i||n[i]===t[i]||xt(e,i,t[i],n[i],r)}(e,f,p,o,a),s)t.__k=[];else 
if(m=t.props.children,At(e,Array.isArray(m)?m:[m],t,n,r,o&&"foreignObject"!==d,i,u,i?i[0]:n.__k&&wt(n,0),a),null!=i)for(m=i.length;m--;)null!=i[m]&&bt(i[m]);a||("value"in f&&void 0!==(m=f.value)&&(m!==p.value||m!==e.value||"progress"===d&&!m)&&xt(e,"value",m,p.value,!1),"checked"in f&&void 0!==(m=f.checked)&&m!==e.checked&&xt(e,"checked",m,p.checked,!1))}return e}function Bt(e,t,n){try{"function"==typeof e?e(t):e.current=t}catch(e){st.__e(e,n)}}function Ft(e,t,n){var r,o;if(st.unmount&&st.unmount(e),(r=e.ref)&&(r.current&&r.current!==e.__e||Bt(r,null,t)),null!=(r=e.__c)){if(r.componentWillUnmount)try{r.componentWillUnmount()}catch(e){st.__e(e,t)}r.base=r.__P=null}if(r=e.__k)for(o=0;o<r.length;o++)r[o]&&Ft(r[o],t,"function"!=typeof e.type);n||null==e.__e||bt(e.__e),e.__e=e.__d=void 0}function Mt(e,t,n){return this.constructor(e,n)}lt=ht.slice,st={__e:function(e,t){for(var n,r,o;t=t.__;)if((n=t.__c)&&!n.__)try{if((r=n.constructor)&&null!=r.getDerivedStateFromError&&(n.setState(r.getDerivedStateFromError(e)),o=n.__d),null!=n.componentDidCatch&&(n.componentDidCatch(e),o=n.__d),o)return n.__E=n}catch(t){e=t}throw e}},pt=0,jt.prototype.setState=function(e,t){var n;n=null!=this.__s&&this.__s!==this.state?this.__s:this.__s=yt({},this.state),"function"==typeof e&&(e=e(yt({},n),this.props)),e&&yt(n,e),null!=e&&this.__v&&(t&&this.__h.push(t),It(this))},jt.prototype.forceUpdate=function(e){this.__v&&(this.__e=!0,e&&this.__h.push(e),It(this))},jt.prototype.render=Pt,ft=[],dt="function"==typeof Promise?Promise.prototype.then.bind(Promise.resolve()):setTimeout,Et.__r=0;var Ut="__aa-highlight__",Ht="__/aa-highlight__";function Vt(e){var t=e.highlightedValue.split(Ut),n=t.shift(),r=function(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:[];return{get:function(){return e},add:function(t){var n=e[e.length-1];(null==n?void 
0:n.isHighlighted)===t.isHighlighted?e[e.length-1]={value:n.value+t.value,isHighlighted:n.isHighlighted}:e.push(t)}}}(n?[{value:n,isHighlighted:!1}]:[]);return t.forEach((function(e){var t=e.split(Ht);r.add({value:t[0],isHighlighted:!0}),""!==t[1]&&r.add({value:t[1],isHighlighted:!1})})),r.get()}function Wt(e){return function(e){if(Array.isArray(e))return Qt(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return Qt(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return Qt(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function Qt(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function $t(e){var t=e.hit,n=e.attribute,r=Array.isArray(n)?n:[n],o=h(t,["_highlightResult"].concat(Wt(r),["value"]));return"string"!=typeof o&&(o=h(t,r)||""),Vt({highlightedValue:o})}var zt={"&amp;":"&","&lt;":"<","&gt;":">","&quot;":'"',"&#39;":"'"},Gt=new RegExp(/\w/i),Kt=/&(amp|quot|lt|gt|#39);/g,Jt=RegExp(Kt.source);function Yt(e,t){var n,r,o,i=e[t],u=(null===(n=e[t+1])||void 0===n?void 0:n.isHighlighted)||!0,a=(null===(r=e[t-1])||void 0===r?void 0:r.isHighlighted)||!0;return Gt.test((o=i.value)&&Jt.test(o)?o.replace(Kt,(function(e){return zt[e]})):o)||a!==u?i.isHighlighted:a}function Xt(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function Zt(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?Xt(Object(n),!0).forEach((function(t){en(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):Xt(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function en(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function tn(e){return e.some((function(e){return e.isHighlighted}))?e.map((function(t,n){return Zt(Zt({},t),{},{isHighlighted:!Yt(e,n)})})):e.map((function(e){return Zt(Zt({},e),{},{isHighlighted:!1})}))}function nn(e){return function(e){if(Array.isArray(e))return rn(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return rn(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return rn(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function rn(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function on(e){var t=e.hit,n=e.attribute,r=Array.isArray(n)?n:[n],o=h(t,["_snippetResult"].concat(nn(r),["value"]));return"string"!=typeof o&&(o=h(t,r)||""),Vt({highlightedValue:o})}function un(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function an(e){for(var t=1;t<arguments.length;t++){var 
n=null!=arguments[t]?arguments[t]:{};t%2?un(Object(n),!0).forEach((function(t){cn(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):un(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function cn(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}var ln=["params"];function sn(e,t){var n=Object.keys(e);if(Object.getOwnPropertySymbols){var r=Object.getOwnPropertySymbols(e);t&&(r=r.filter((function(t){return Object.getOwnPropertyDescriptor(e,t).enumerable}))),n.push.apply(n,r)}return n}function pn(e){for(var t=1;t<arguments.length;t++){var n=null!=arguments[t]?arguments[t]:{};t%2?sn(Object(n),!0).forEach((function(t){fn(e,t,n[t])})):Object.getOwnPropertyDescriptors?Object.defineProperties(e,Object.getOwnPropertyDescriptors(n)):sn(Object(n)).forEach((function(t){Object.defineProperty(e,t,Object.getOwnPropertyDescriptor(n,t))}))}return e}function fn(e,t,n){return t in e?Object.defineProperty(e,t,{value:n,enumerable:!0,configurable:!0,writable:!0}):e[t]=n,e}function dn(e,t){if(null==e)return{};var n,r,o=function(e,t){if(null==e)return{};var n,r,o={},i=Object.keys(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||(o[n]=e[n]);return o}(e,t);if(Object.getOwnPropertySymbols){var i=Object.getOwnPropertySymbols(e);for(r=0;r<i.length;r++)n=i[r],t.indexOf(n)>=0||Object.prototype.propertyIsEnumerable.call(e,n)&&(o[n]=e[n])}return o}function mn(e){return function(e){if(Array.isArray(e))return vn(e)}(e)||function(e){if("undefined"!=typeof Symbol&&null!=e[Symbol.iterator]||null!=e["@@iterator"])return Array.from(e)}(e)||function(e,t){if(!e)return;if("string"==typeof e)return vn(e,t);var n=Object.prototype.toString.call(e).slice(8,-1);"Object"===n&&e.constructor&&(n=e.constructor.name);if("Map"===n||"Set"===n)return Array.from(e);if("Arguments"===n||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return 
vn(e,t)}(e)||function(){throw new TypeError("Invalid attempt to spread non-iterable instance.\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.")}()}function vn(e,t){(null==t||t>e.length)&&(t=e.length);for(var n=0,r=new Array(t);n<t;n++)r[n]=e[n];return r}function hn(e){var t=e.createElement,n=e.Fragment;function r(e){var r=e.hit,o=e.attribute,i=e.tagName,u=void 0===i?"mark":i;return t(n,{},$t({hit:r,attribute:o}).map((function(e,n){return e.isHighlighted?t(u,{key:n},e.value):e.value})))}return r.__autocomplete_componentName="Highlight",r}function gn(e){var t=e.createElement,n=e.Fragment;function r(e){var r,o=e.hit,i=e.attribute,u=e.tagName,a=void 0===u?"mark":u;return t(n,{},(r={hit:o,attribute:i},tn($t(r))).map((function(e,n){return e.isHighlighted?t(a,{key:n},e.value):e.value})))}return r.__autocomplete_componentName="ReverseHighlight",r}function yn(e){var t=e.createElement,n=e.Fragment;function r(e){var r,o=e.hit,i=e.attribute,u=e.tagName,a=void 0===u?"mark":u;return t(n,{},(r={hit:o,attribute:i},tn(on(r))).map((function(e,n){return e.isHighlighted?t(a,{key:n},e.value):e.value})))}return r.__autocomplete_componentName="ReverseSnippet",r}function bn(e){var t=e.createElement,n=e.Fragment;function r(e){var r=e.hit,o=e.attribute,i=e.tagName,u=void 0===i?"mark":i;return t(n,{},on({hit:r,attribute:o}).map((function(e,n){return e.isHighlighted?t(u,{key:n},e.value):e.value})))}return r.__autocomplete_componentName="Snippet",r}var 
On=["classNames","container","getEnvironmentProps","getFormProps","getInputProps","getItemProps","getLabelProps","getListProps","getPanelProps","getRootProps","panelContainer","panelPlacement","render","renderNoResults","renderer","detachedMediaQuery","components","translations"],_n={clearButton:"aa-ClearButton",detachedCancelButton:"aa-DetachedCancelButton",detachedContainer:"aa-DetachedContainer",detachedFormContainer:"aa-DetachedFormContainer",detachedOverlay:"aa-DetachedOverlay",detachedSearchButton:"aa-DetachedSearchButton",detachedSearchButtonIcon:"aa-DetachedSearchButtonIcon",detachedSearchButtonPlaceholder:"aa-DetachedSearchButtonPlaceholder",form:"aa-Form",input:"aa-Input",inputWrapper:"aa-InputWrapper",inputWrapperPrefix:"aa-InputWrapperPrefix",inputWrapperSuffix:"aa-InputWrapperSuffix",item:"aa-Item",label:"aa-Label",list:"aa-List",loadingIndicator:"aa-LoadingIndicator",panel:"aa-Panel",panelLayout:"aa-PanelLayout aa-Panel--scrollable",root:"aa-Autocomplete",source:"aa-Source",sourceFooter:"aa-SourceFooter",sourceHeader:"aa-SourceHeader",sourceNoResults:"aa-SourceNoResults",submitButton:"aa-SubmitButton"},Pn=function(e,t){var n=e.children;(0,e.render)(n,t)},jn={createElement:Ot,Fragment:Pt,render:function(e,t,n){var r,o,i;st.__&&st.__(e,t),o=(r="function"==typeof n)?null:n&&n.__k||t.__k,i=[],Rt(t,e=(!r&&n||t).__k=Ot(Pt,null,[e]),o||vt,vt,void 0!==t.ownerSVGElement,!r&&n?[n]:o?null:t.firstChild?lt.call(t.childNodes):null,i,!r&&n?n:o?o.__e:t.firstChild,r),Tt(i,e)}};function wn(e){var t=e.panelPlacement,n=e.container,r=e.form,o=e.environment,i=n.getBoundingClientRect(),u=(o.pageYOffset||o.document.documentElement.scrollTop||o.document.body.scrollTop||0)+i.top+i.height;switch(t){case"start":return{top:u,left:i.left};case"end":return{top:u,right:o.document.documentElement.clientWidth-(i.left+i.width)};case"full-width":return{top:u,left:0,right:0,width:"unset",maxWidth:"unset"};case"input-wrapper-width":var 
a=r.getBoundingClientRect();return{top:u,left:a.left,right:o.document.documentElement.clientWidth-(a.left+a.width),width:"unset",maxWidth:"unset"};default:throw new Error("[Autocomplete] The `panelPlacement` value ".concat(JSON.stringify(t)," is not valid."))}}var Sn=[{segment:"autocomplete-js",version:b}],In=["components"];var En=function(e,t){function n(t){return e({searchClient:t.searchClient,queries:t.requests.map((function(e){return e.query}))}).then((function(e){return e.map((function(e,n){var r=t.requests[n];return{items:e,sourceId:r.sourceId,transformResponse:r.transformResponse}}))}))}return function(e){return function(r){return an(an({requesterId:t,execute:n},e),r)}}}((function(e){return function(e){var t=e.searchClient,n=e.queries,r=e.userAgents,o=void 0===r?[]:r;return"function"==typeof t.addAlgoliaAgent&&[].concat(mn(O),mn(o)).forEach((function(e){var n=e.segment,r=e.version;t.addAlgoliaAgent(n,r)})),t.search(n.map((function(e){var t=e.params;return pn(pn({},dn(e,ln)),{},{params:pn({hitsPerPage:5,highlightPreTag:Ut,highlightPostTag:Ht},t)})}))).then((function(e){return e.results}))}(n(n({},e),{},{userAgents:Sn}))}),"algolia");var An=En({transformResponse:function(e){return e.hits}});e.autocomplete=function(e){var t,r=function(){var e=[],t=[];function n(n){e.push(n);var r=n();t.push(r)}return{runEffect:n,cleanupEffects:function(){var e=t;t=[],e.forEach((function(e){e()}))},runEffects:function(){var t=e;e=[],t.forEach((function(e){n(e)}))}}}(),a=r.runEffect,c=r.cleanupEffects,l=r.runEffects,s=(t=[],{reactive:function(e){var n=e(),r={_fn:e,_ref:{current:n},get value(){return this._ref.current},set value(e){this._ref.current=e}};return t.push(r),r},runReactives:function(){t.forEach((function(e){e._ref.current=e._fn()}))}}),d=s.reactive,m=s.runReactives,h=p(!1),y=p(e),b=p(void 0),O=d((function(){return function(e){var 
t,r=e.classNames,o=e.container,i=e.getEnvironmentProps,a=e.getFormProps,c=e.getInputProps,l=e.getItemProps,s=e.getLabelProps,p=e.getListProps,f=e.getPanelProps,d=e.getRootProps,m=e.panelContainer,h=e.panelPlacement,g=e.render,y=e.renderNoResults,b=e.renderer,O=e.detachedMediaQuery,_=e.components,P=e.translations,j=u(e,On),w="undefined"!=typeof window?window:{},S=Qe(w,o);S.tagName;var I=n(n({},jn),b),E={Highlight:hn(I),ReverseHighlight:gn(I),ReverseSnippet:yn(I),Snippet:bn(I)};return{renderer:{classNames:$e(_n,null!=r?r:{}),container:S,getEnvironmentProps:null!=i?i:function(e){return e.props},getFormProps:null!=a?a:function(e){return e.props},getInputProps:null!=c?c:function(e){return e.props},getItemProps:null!=l?l:function(e){return e.props},getLabelProps:null!=s?s:function(e){return e.props},getListProps:null!=p?p:function(e){return e.props},getPanelProps:null!=f?f:function(e){return e.props},getRootProps:null!=d?d:function(e){return e.props},panelContainer:m?Qe(w,m):w.document.body,panelPlacement:null!=h?h:"input-wrapper-width",render:null!=g?g:Pn,renderNoResults:y,renderer:I,detachedMediaQuery:null!=O?O:getComputedStyle(w.document.documentElement).getPropertyValue("--aa-detached-media-query"),components:n(n({},E),_),translations:n(n({},{clearButtonTitle:"Clear",detachedCancelButtonText:"Cancel",submitButtonTitle:"Submit"}),P)},core:n(n({},j),{},{id:null!==(t=j.id)&&void 0!==t?t:v(),environment:w})}}(y.current)})),_=d((function(){return O.value.core.environment.matchMedia(O.value.renderer.detachedMediaQuery).matches})),P=d((function(){return Me(n(n({},O.value.core),{},{onStateChange:function(e){var t,n,r;h.current=e.state.collections.some((function(e){return e.source.templates.noResults})),null===(t=b.current)||void 0===t||t.call(b,e),null===(n=(r=O.value.core).onStateChange)||void 0===n||n.call(r,e)},shouldPanelOpen:y.current.shouldPanelOpen||function(e){var t=e.state;if(_.value)return!0;var n=g(t)>0;if(!O.value.core.openOnFocus&&!t.query)return n;var 
r=Boolean(h.current||O.value.renderer.renderNoResults);return!n&&r||n},__autocomplete_metadata:{userAgents:Sn,options:e}}))})),j=p(n({collections:[],completion:null,context:{},isOpen:!1,query:"",activeItemId:null,status:"idle"},O.value.core.initialState)),w={getEnvironmentProps:O.value.renderer.getEnvironmentProps,getFormProps:O.value.renderer.getFormProps,getInputProps:O.value.renderer.getInputProps,getItemProps:O.value.renderer.getItemProps,getLabelProps:O.value.renderer.getLabelProps,getListProps:O.value.renderer.getListProps,getPanelProps:O.value.renderer.getPanelProps,getRootProps:O.value.renderer.getRootProps},S={setActiveItemId:P.value.setActiveItemId,setQuery:P.value.setQuery,setCollections:P.value.setCollections,setIsOpen:P.value.setIsOpen,setStatus:P.value.setStatus,setContext:P.value.setContext,refresh:P.value.refresh},I=d((function(){return Ve.bind(O.value.renderer.renderer.createElement)})),E=d((function(){return ct({autocomplete:P.value,autocompleteScopeApi:S,classNames:O.value.renderer.classNames,environment:O.value.core.environment,isDetached:_.value,placeholder:O.value.core.placeholder,propGetters:w,setIsModalOpen:k,state:j.current,translations:O.value.renderer.translations})}));function A(){tt(E.value.panel,{style:_.value?{}:wn({panelPlacement:O.value.renderer.panelPlacement,container:E.value.root,form:E.value.form,environment:O.value.core.environment})})}function C(e){j.current=e;var t={autocomplete:P.value,autocompleteScopeApi:S,classNames:O.value.renderer.classNames,components:O.value.renderer.components,container:O.value.renderer.container,html:I.value,dom:E.value,panelContainer:_.value?E.value.detachedContainer:O.value.renderer.panelContainer,propGetters:w,state:j.current,renderer:O.value.renderer.renderer},r=!g(e)&&!h.current&&O.value.renderer.renderNoResults||O.value.renderer.render;!function(e){var 
t=e.autocomplete,r=e.autocompleteScopeApi,o=e.dom,i=e.propGetters,u=e.state;nt(o.root,i.getRootProps(n({state:u,props:t.getRootProps({})},r))),nt(o.input,i.getInputProps(n({state:u,props:t.getInputProps({inputElement:o.input}),inputElement:o.input},r))),tt(o.label,{hidden:"stalled"===u.status}),tt(o.loadingIndicator,{hidden:"stalled"!==u.status}),tt(o.clearButton,{hidden:!u.query})}(t),function(e,t){var r=t.autocomplete,o=t.autocompleteScopeApi,u=t.classNames,a=t.html,c=t.dom,l=t.panelContainer,s=t.propGetters,p=t.state,f=t.components,d=t.renderer;if(p.isOpen){l.contains(c.panel)||"loading"===p.status||l.appendChild(c.panel),c.panel.classList.toggle("aa-Panel--stalled","stalled"===p.status);var m=p.collections.filter((function(e){var t=e.source,n=e.items;return t.templates.noResults||n.length>0})).map((function(e,t){var c=e.source,l=e.items;return d.createElement("section",{key:t,className:u.source,"data-autocomplete-source-id":c.sourceId},c.templates.header&&d.createElement("div",{className:u.sourceHeader},c.templates.header({components:f,createElement:d.createElement,Fragment:d.Fragment,items:l,source:c,state:p,html:a})),c.templates.noResults&&0===l.length?d.createElement("div",{className:u.sourceNoResults},c.templates.noResults({components:f,createElement:d.createElement,Fragment:d.Fragment,source:c,state:p,html:a})):d.createElement("ul",i({className:u.list},s.getListProps(n({state:p,props:r.getListProps({})},o))),l.map((function(e){var t=r.getItemProps({item:e,source:c});return 
d.createElement("li",i({key:t.id,className:u.item},s.getItemProps(n({state:p,props:t},o))),c.templates.item({components:f,createElement:d.createElement,Fragment:d.Fragment,item:e,state:p,html:a}))}))),c.templates.footer&&d.createElement("div",{className:u.sourceFooter},c.templates.footer({components:f,createElement:d.createElement,Fragment:d.Fragment,items:l,source:c,state:p,html:a})))})),v=d.createElement(d.Fragment,null,d.createElement("div",{className:u.panelLayout},m),d.createElement("div",{className:"aa-GradientBottom"})),h=m.reduce((function(e,t){return e[t.props["data-autocomplete-source-id"]]=t,e}),{});e(n(n({children:v,state:p,sections:m,elements:h},d),{},{components:f,html:a},o),c.panel)}else l.contains(c.panel)&&l.removeChild(c.panel)}(r,t)}function D(){var e=arguments.length>0&&void 0!==arguments[0]?arguments[0]:{};c();var t=O.value.renderer,n=t.components,r=u(t,In);y.current=Ge(r,O.value.core,{components:Ke(n,(function(e){return!e.value.hasOwnProperty("__autocomplete_componentName")})),initialState:j.current},e),m(),l(),P.value.refresh().then((function(){C(j.current)}))}function k(e){requestAnimationFrame((function(){var t=O.value.core.environment.document.body.contains(E.value.detachedOverlay);e!==t&&(e?(O.value.core.environment.document.body.appendChild(E.value.detachedOverlay),O.value.core.environment.document.body.classList.add("aa-Detached"),E.value.input.focus()):(O.value.core.environment.document.body.removeChild(E.value.detachedOverlay),O.value.core.environment.document.body.classList.remove("aa-Detached"),P.value.setQuery(""),P.value.refresh()))}))}return a((function(){var e=P.value.getEnvironmentProps({formElement:E.value.form,panelElement:E.value.panel,inputElement:E.value.input});return tt(O.value.core.environment,e),function(){tt(O.value.core.environment,Object.keys(e).reduce((function(e,t){return n(n({},e),{},o({},t,void 0))}),{}))}})),a((function(){var 
e=_.value?O.value.core.environment.document.body:O.value.renderer.panelContainer,t=_.value?E.value.detachedOverlay:E.value.panel;return _.value&&j.current.isOpen&&k(!0),C(j.current),function(){e.contains(t)&&e.removeChild(t)}})),a((function(){var e=O.value.renderer.container;return e.appendChild(E.value.root),function(){e.removeChild(E.value.root)}})),a((function(){var e=f((function(e){C(e.state)}),0);return b.current=function(t){var n=t.state,r=t.prevState;(_.value&&r.isOpen!==n.isOpen&&k(n.isOpen),_.value||!n.isOpen||r.isOpen||A(),n.query!==r.query)&&O.value.core.environment.document.querySelectorAll(".aa-Panel--scrollable").forEach((function(e){0!==e.scrollTop&&(e.scrollTop=0)}));e({state:n})},function(){b.current=void 0}})),a((function(){var e=f((function(){var e=_.value;_.value=O.value.core.environment.matchMedia(O.value.renderer.detachedMediaQuery).matches,e!==_.value?D({}):requestAnimationFrame(A)}),20);return O.value.core.environment.addEventListener("resize",e),function(){O.value.core.environment.removeEventListener("resize",e)}})),a((function(){if(!_.value)return function(){};function e(e){E.value.detachedContainer.classList.toggle("aa-DetachedContainer--modal",e)}function t(t){e(t.matches)}var n=O.value.core.environment.matchMedia(getComputedStyle(O.value.core.environment.document.documentElement).getPropertyValue("--aa-detached-modal-media-query"));e(n.matches);var r=Boolean(n.addEventListener);return r?n.addEventListener("change",t):n.addListener(t),function(){r?n.removeEventListener("change",t):n.removeListener(t)}})),a((function(){return requestAnimationFrame(A),function(){}})),n(n({},S),{},{update:D,destroy:function(){c()}})},e.getAlgoliaFacets=function(e){var t=En({transformResponse:function(e){return e.facetHits}}),r=e.queries.map((function(e){return n(n({},e),{},{type:"facet"})}));return t(n(n({},e),{},{queries:r}))},e.getAlgoliaResults=An,Object.defineProperty(e,"__esModule",{value:!0})}));
3
-
 
 
 
 
spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/openai/models.py DELETED
@@ -1,78 +0,0 @@
1
- from extensions.openai.embeddings import get_embeddings_model_name
2
- from extensions.openai.errors import OpenAIError
3
- from modules import shared
4
- from modules.models import load_model as _load_model
5
- from modules.models import unload_model
6
- from modules.models_settings import get_model_metadata, update_model_parameters
7
- from modules.utils import get_available_models
8
-
9
-
10
- def get_current_model_list() -> list:
11
- return [shared.model_name] # The real chat/completions model, maybe "None"
12
-
13
-
14
- def get_pseudo_model_list() -> list:
15
- return [ # these are expected by so much, so include some here as a dummy
16
- 'gpt-3.5-turbo',
17
- 'text-embedding-ada-002',
18
- ]
19
-
20
-
21
- def load_model(model_name: str) -> dict:
22
- resp = {
23
- "id": model_name,
24
- "object": "engine",
25
- "owner": "self",
26
- "ready": True,
27
- }
28
- if model_name not in get_pseudo_model_list() + [get_embeddings_model_name()] + get_current_model_list(): # Real model only
29
- # No args. Maybe it works anyways!
30
- # TODO: hack some heuristics into args for better results
31
-
32
- shared.model_name = model_name
33
- unload_model()
34
-
35
- model_settings = get_model_metadata(shared.model_name)
36
- shared.settings.update({k: v for k, v in model_settings.items() if k in shared.settings})
37
- update_model_parameters(model_settings, initial=True)
38
-
39
- if shared.settings['mode'] != 'instruct':
40
- shared.settings['instruction_template'] = None
41
-
42
- shared.model, shared.tokenizer = _load_model(shared.model_name)
43
-
44
- if not shared.model: # load failed.
45
- shared.model_name = "None"
46
- raise OpenAIError(f"Model load failed for: {shared.model_name}")
47
-
48
- return resp
49
-
50
-
51
- def list_models(is_legacy: bool = False) -> dict:
52
- # TODO: Lora's?
53
- all_model_list = get_current_model_list() + [get_embeddings_model_name()] + get_pseudo_model_list() + get_available_models()
54
-
55
- models = {}
56
-
57
- if is_legacy:
58
- models = [{"id": id, "object": "engine", "owner": "user", "ready": True} for id in all_model_list]
59
- if not shared.model:
60
- models[0]['ready'] = False
61
- else:
62
- models = [{"id": id, "object": "model", "owned_by": "user", "permission": []} for id in all_model_list]
63
-
64
- resp = {
65
- "object": "list",
66
- "data": models,
67
- }
68
-
69
- return resp
70
-
71
-
72
- def model_info(model_name: str) -> dict:
73
- return {
74
- "id": model_name,
75
- "object": "model",
76
- "owned_by": "user",
77
- "permission": []
78
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Anonymous-sub/Rerender/ControlNet/annotator/uniformer/mmcv/cnn/utils/fuse_conv_bn.py DELETED
@@ -1,59 +0,0 @@
1
- # Copyright (c) OpenMMLab. All rights reserved.
2
- import torch
3
- import torch.nn as nn
4
-
5
-
6
- def _fuse_conv_bn(conv, bn):
7
- """Fuse conv and bn into one module.
8
-
9
- Args:
10
- conv (nn.Module): Conv to be fused.
11
- bn (nn.Module): BN to be fused.
12
-
13
- Returns:
14
- nn.Module: Fused module.
15
- """
16
- conv_w = conv.weight
17
- conv_b = conv.bias if conv.bias is not None else torch.zeros_like(
18
- bn.running_mean)
19
-
20
- factor = bn.weight / torch.sqrt(bn.running_var + bn.eps)
21
- conv.weight = nn.Parameter(conv_w *
22
- factor.reshape([conv.out_channels, 1, 1, 1]))
23
- conv.bias = nn.Parameter((conv_b - bn.running_mean) * factor + bn.bias)
24
- return conv
25
-
26
-
27
- def fuse_conv_bn(module):
28
- """Recursively fuse conv and bn in a module.
29
-
30
- During inference, the functionary of batch norm layers is turned off
31
- but only the mean and var alone channels are used, which exposes the
32
- chance to fuse it with the preceding conv layers to save computations and
33
- simplify network structures.
34
-
35
- Args:
36
- module (nn.Module): Module to be fused.
37
-
38
- Returns:
39
- nn.Module: Fused module.
40
- """
41
- last_conv = None
42
- last_conv_name = None
43
-
44
- for name, child in module.named_children():
45
- if isinstance(child,
46
- (nn.modules.batchnorm._BatchNorm, nn.SyncBatchNorm)):
47
- if last_conv is None: # only fuse BN that is after Conv
48
- continue
49
- fused_conv = _fuse_conv_bn(last_conv, child)
50
- module._modules[last_conv_name] = fused_conv
51
- # To reduce changes, set BN as Identity instead of deleting it.
52
- module._modules[name] = nn.Identity()
53
- last_conv = None
54
- elif isinstance(child, nn.Conv2d):
55
- last_conv = child
56
- last_conv_name = name
57
- else:
58
- fuse_conv_bn(child)
59
- return module
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Anustup/NS_AI_LABS/tests/segments_test.py DELETED
@@ -1,48 +0,0 @@
1
- import sys
2
- import unittest
3
-
4
- sys.path.append('../NS_AI_LABS')
5
-
6
- from src.segments import merge_timestamps
7
-
8
- class TestSegments(unittest.TestCase):
9
- def __init__(self, *args, **kwargs):
10
- super(TestSegments, self).__init__(*args, **kwargs)
11
-
12
- def test_merge_segments(self):
13
- segments = [
14
- {'start': 10.0, 'end': 20.0},
15
- {'start': 22.0, 'end': 27.0},
16
- {'start': 31.0, 'end': 35.0},
17
- {'start': 45.0, 'end': 60.0},
18
- {'start': 61.0, 'end': 65.0},
19
- {'start': 68.0, 'end': 98.0},
20
- {'start': 100.0, 'end': 102.0},
21
- {'start': 110.0, 'end': 112.0}
22
- ]
23
-
24
- result = merge_timestamps(segments, merge_window=5, max_merge_size=30, padding_left=1, padding_right=1)
25
-
26
- self.assertListEqual(result, [
27
- {'start': 9.0, 'end': 36.0},
28
- {'start': 44.0, 'end': 66.0},
29
- {'start': 67.0, 'end': 99.0},
30
- {'start': 99.0, 'end': 103.0},
31
- {'start': 109.0, 'end': 113.0}
32
- ])
33
-
34
- def test_overlap_next(self):
35
- segments = [
36
- {'start': 5.0, 'end': 39.182},
37
- {'start': 39.986, 'end': 40.814}
38
- ]
39
-
40
- result = merge_timestamps(segments, merge_window=5, max_merge_size=30, padding_left=1, padding_right=1)
41
-
42
- self.assertListEqual(result, [
43
- {'start': 4.0, 'end': 39.584},
44
- {'start': 39.584, 'end': 41.814}
45
- ])
46
-
47
- if __name__ == '__main__':
48
- unittest.main()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/ArtGAN/Diffusion-API/diffusion_webui/utils/__init__.py DELETED
File without changes
spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/pip/_internal/utils/virtualenv.py DELETED
@@ -1,104 +0,0 @@
1
- import logging
2
- import os
3
- import re
4
- import site
5
- import sys
6
- from typing import List, Optional
7
-
8
- logger = logging.getLogger(__name__)
9
- _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
10
- r"include-system-site-packages\s*=\s*(?P<value>true|false)"
11
- )
12
-
13
-
14
- def _running_under_venv() -> bool:
15
- """Checks if sys.base_prefix and sys.prefix match.
16
-
17
- This handles PEP 405 compliant virtual environments.
18
- """
19
- return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
20
-
21
-
22
- def _running_under_legacy_virtualenv() -> bool:
23
- """Checks if sys.real_prefix is set.
24
-
25
- This handles virtual environments created with pypa's virtualenv.
26
- """
27
- # pypa/virtualenv case
28
- return hasattr(sys, "real_prefix")
29
-
30
-
31
- def running_under_virtualenv() -> bool:
32
- """True if we're running inside a virtual environment, False otherwise."""
33
- return _running_under_venv() or _running_under_legacy_virtualenv()
34
-
35
-
36
- def _get_pyvenv_cfg_lines() -> Optional[List[str]]:
37
- """Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
38
-
39
- Returns None, if it could not read/access the file.
40
- """
41
- pyvenv_cfg_file = os.path.join(sys.prefix, "pyvenv.cfg")
42
- try:
43
- # Although PEP 405 does not specify, the built-in venv module always
44
- # writes with UTF-8. (pypa/pip#8717)
45
- with open(pyvenv_cfg_file, encoding="utf-8") as f:
46
- return f.read().splitlines() # avoids trailing newlines
47
- except OSError:
48
- return None
49
-
50
-
51
- def _no_global_under_venv() -> bool:
52
- """Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
53
-
54
- PEP 405 specifies that when system site-packages are not supposed to be
55
- visible from a virtual environment, `pyvenv.cfg` must contain the following
56
- line:
57
-
58
- include-system-site-packages = false
59
-
60
- Additionally, log a warning if accessing the file fails.
61
- """
62
- cfg_lines = _get_pyvenv_cfg_lines()
63
- if cfg_lines is None:
64
- # We're not in a "sane" venv, so assume there is no system
65
- # site-packages access (since that's PEP 405's default state).
66
- logger.warning(
67
- "Could not access 'pyvenv.cfg' despite a virtual environment "
68
- "being active. Assuming global site-packages is not accessible "
69
- "in this environment."
70
- )
71
- return True
72
-
73
- for line in cfg_lines:
74
- match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
75
- if match is not None and match.group("value") == "false":
76
- return True
77
- return False
78
-
79
-
80
- def _no_global_under_legacy_virtualenv() -> bool:
81
- """Check if "no-global-site-packages.txt" exists beside site.py
82
-
83
- This mirrors logic in pypa/virtualenv for determining whether system
84
- site-packages are visible in the virtual environment.
85
- """
86
- site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
87
- no_global_site_packages_file = os.path.join(
88
- site_mod_dir,
89
- "no-global-site-packages.txt",
90
- )
91
- return os.path.exists(no_global_site_packages_file)
92
-
93
-
94
- def virtualenv_no_global() -> bool:
95
- """Returns a boolean, whether running in venv with no system site-packages."""
96
- # PEP 405 compliance needs to be checked first since virtualenv >=20 would
97
- # return True for both checks, but is only able to use the PEP 405 config.
98
- if _running_under_venv():
99
- return _no_global_under_venv()
100
-
101
- if _running_under_legacy_virtualenv():
102
- return _no_global_under_legacy_virtualenv()
103
-
104
- return False
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Ataturk-Chatbot/HuggingFaceChat/venv/lib/python3.11/site-packages/setuptools/_vendor/jaraco/text/__init__.py DELETED
@@ -1,599 +0,0 @@
1
- import re
2
- import itertools
3
- import textwrap
4
- import functools
5
-
6
- try:
7
- from importlib.resources import files # type: ignore
8
- except ImportError: # pragma: nocover
9
- from setuptools.extern.importlib_resources import files # type: ignore
10
-
11
- from setuptools.extern.jaraco.functools import compose, method_cache
12
- from setuptools.extern.jaraco.context import ExceptionTrap
13
-
14
-
15
- def substitution(old, new):
16
- """
17
- Return a function that will perform a substitution on a string
18
- """
19
- return lambda s: s.replace(old, new)
20
-
21
-
22
- def multi_substitution(*substitutions):
23
- """
24
- Take a sequence of pairs specifying substitutions, and create
25
- a function that performs those substitutions.
26
-
27
- >>> multi_substitution(('foo', 'bar'), ('bar', 'baz'))('foo')
28
- 'baz'
29
- """
30
- substitutions = itertools.starmap(substitution, substitutions)
31
- # compose function applies last function first, so reverse the
32
- # substitutions to get the expected order.
33
- substitutions = reversed(tuple(substitutions))
34
- return compose(*substitutions)
35
-
36
-
37
- class FoldedCase(str):
38
- """
39
- A case insensitive string class; behaves just like str
40
- except compares equal when the only variation is case.
41
-
42
- >>> s = FoldedCase('hello world')
43
-
44
- >>> s == 'Hello World'
45
- True
46
-
47
- >>> 'Hello World' == s
48
- True
49
-
50
- >>> s != 'Hello World'
51
- False
52
-
53
- >>> s.index('O')
54
- 4
55
-
56
- >>> s.split('O')
57
- ['hell', ' w', 'rld']
58
-
59
- >>> sorted(map(FoldedCase, ['GAMMA', 'alpha', 'Beta']))
60
- ['alpha', 'Beta', 'GAMMA']
61
-
62
- Sequence membership is straightforward.
63
-
64
- >>> "Hello World" in [s]
65
- True
66
- >>> s in ["Hello World"]
67
- True
68
-
69
- You may test for set inclusion, but candidate and elements
70
- must both be folded.
71
-
72
- >>> FoldedCase("Hello World") in {s}
73
- True
74
- >>> s in {FoldedCase("Hello World")}
75
- True
76
-
77
- String inclusion works as long as the FoldedCase object
78
- is on the right.
79
-
80
- >>> "hello" in FoldedCase("Hello World")
81
- True
82
-
83
- But not if the FoldedCase object is on the left:
84
-
85
- >>> FoldedCase('hello') in 'Hello World'
86
- False
87
-
88
- In that case, use ``in_``:
89
-
90
- >>> FoldedCase('hello').in_('Hello World')
91
- True
92
-
93
- >>> FoldedCase('hello') > FoldedCase('Hello')
94
- False
95
- """
96
-
97
- def __lt__(self, other):
98
- return self.lower() < other.lower()
99
-
100
- def __gt__(self, other):
101
- return self.lower() > other.lower()
102
-
103
- def __eq__(self, other):
104
- return self.lower() == other.lower()
105
-
106
- def __ne__(self, other):
107
- return self.lower() != other.lower()
108
-
109
- def __hash__(self):
110
- return hash(self.lower())
111
-
112
- def __contains__(self, other):
113
- return super().lower().__contains__(other.lower())
114
-
115
- def in_(self, other):
116
- "Does self appear in other?"
117
- return self in FoldedCase(other)
118
-
119
- # cache lower since it's likely to be called frequently.
120
- @method_cache
121
- def lower(self):
122
- return super().lower()
123
-
124
- def index(self, sub):
125
- return self.lower().index(sub.lower())
126
-
127
- def split(self, splitter=' ', maxsplit=0):
128
- pattern = re.compile(re.escape(splitter), re.I)
129
- return pattern.split(self, maxsplit)
130
-
131
-
132
- # Python 3.8 compatibility
133
- _unicode_trap = ExceptionTrap(UnicodeDecodeError)
134
-
135
-
136
- @_unicode_trap.passes
137
- def is_decodable(value):
138
- r"""
139
- Return True if the supplied value is decodable (using the default
140
- encoding).
141
-
142
- >>> is_decodable(b'\xff')
143
- False
144
- >>> is_decodable(b'\x32')
145
- True
146
- """
147
- value.decode()
148
-
149
-
150
- def is_binary(value):
151
- r"""
152
- Return True if the value appears to be binary (that is, it's a byte
153
- string and isn't decodable).
154
-
155
- >>> is_binary(b'\xff')
156
- True
157
- >>> is_binary('\xff')
158
- False
159
- """
160
- return isinstance(value, bytes) and not is_decodable(value)
161
-
162
-
163
- def trim(s):
164
- r"""
165
- Trim something like a docstring to remove the whitespace that
166
- is common due to indentation and formatting.
167
-
168
- >>> trim("\n\tfoo = bar\n\t\tbar = baz\n")
169
- 'foo = bar\n\tbar = baz'
170
- """
171
- return textwrap.dedent(s).strip()
172
-
173
-
174
- def wrap(s):
175
- """
176
- Wrap lines of text, retaining existing newlines as
177
- paragraph markers.
178
-
179
- >>> print(wrap(lorem_ipsum))
180
- Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
181
- eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad
182
- minim veniam, quis nostrud exercitation ullamco laboris nisi ut
183
- aliquip ex ea commodo consequat. Duis aute irure dolor in
184
- reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
185
- pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
186
- culpa qui officia deserunt mollit anim id est laborum.
187
- <BLANKLINE>
188
- Curabitur pretium tincidunt lacus. Nulla gravida orci a odio. Nullam
189
- varius, turpis et commodo pharetra, est eros bibendum elit, nec luctus
190
- magna felis sollicitudin mauris. Integer in mauris eu nibh euismod
191
- gravida. Duis ac tellus et risus vulputate vehicula. Donec lobortis
192
- risus a elit. Etiam tempor. Ut ullamcorper, ligula eu tempor congue,
193
- eros est euismod turpis, id tincidunt sapien risus a quam. Maecenas
194
- fermentum consequat mi. Donec fermentum. Pellentesque malesuada nulla
195
- a mi. Duis sapien sem, aliquet nec, commodo eget, consequat quis,
196
- neque. Aliquam faucibus, elit ut dictum aliquet, felis nisl adipiscing
197
- sapien, sed malesuada diam lacus eget erat. Cras mollis scelerisque
198
- nunc. Nullam arcu. Aliquam consequat. Curabitur augue lorem, dapibus
199
- quis, laoreet et, pretium ac, nisi. Aenean magna nisl, mollis quis,
200
- molestie eu, feugiat in, orci. In hac habitasse platea dictumst.
201
- """
202
- paragraphs = s.splitlines()
203
- wrapped = ('\n'.join(textwrap.wrap(para)) for para in paragraphs)
204
- return '\n\n'.join(wrapped)
205
-
206
-
207
- def unwrap(s):
208
- r"""
209
- Given a multi-line string, return an unwrapped version.
210
-
211
- >>> wrapped = wrap(lorem_ipsum)
212
- >>> wrapped.count('\n')
213
- 20
214
- >>> unwrapped = unwrap(wrapped)
215
- >>> unwrapped.count('\n')
216
- 1
217
- >>> print(unwrapped)
218
- Lorem ipsum dolor sit amet, consectetur adipiscing ...
219
- Curabitur pretium tincidunt lacus. Nulla gravida orci ...
220
-
221
- """
222
- paragraphs = re.split(r'\n\n+', s)
223
- cleaned = (para.replace('\n', ' ') for para in paragraphs)
224
- return '\n'.join(cleaned)
225
-
226
-
227
-
228
-
229
- class Splitter(object):
230
- """object that will split a string with the given arguments for each call
231
-
232
- >>> s = Splitter(',')
233
- >>> s('hello, world, this is your, master calling')
234
- ['hello', ' world', ' this is your', ' master calling']
235
- """
236
-
237
- def __init__(self, *args):
238
- self.args = args
239
-
240
- def __call__(self, s):
241
- return s.split(*self.args)
242
-
243
-
244
- def indent(string, prefix=' ' * 4):
245
- """
246
- >>> indent('foo')
247
- ' foo'
248
- """
249
- return prefix + string
250
-
251
-
252
- class WordSet(tuple):
253
- """
254
- Given an identifier, return the words that identifier represents,
255
- whether in camel case, underscore-separated, etc.
256
-
257
- >>> WordSet.parse("camelCase")
258
- ('camel', 'Case')
259
-
260
- >>> WordSet.parse("under_sep")
261
- ('under', 'sep')
262
-
263
- Acronyms should be retained
264
-
265
- >>> WordSet.parse("firstSNL")
266
- ('first', 'SNL')
267
-
268
- >>> WordSet.parse("you_and_I")
269
- ('you', 'and', 'I')
270
-
271
- >>> WordSet.parse("A simple test")
272
- ('A', 'simple', 'test')
273
-
274
- Multiple caps should not interfere with the first cap of another word.
275
-
276
- >>> WordSet.parse("myABCClass")
277
- ('my', 'ABC', 'Class')
278
-
279
- The result is a WordSet, so you can get the form you need.
280
-
281
- >>> WordSet.parse("myABCClass").underscore_separated()
282
- 'my_ABC_Class'
283
-
284
- >>> WordSet.parse('a-command').camel_case()
285
- 'ACommand'
286
-
287
- >>> WordSet.parse('someIdentifier').lowered().space_separated()
288
- 'some identifier'
289
-
290
- Slices of the result should return another WordSet.
291
-
292
- >>> WordSet.parse('taken-out-of-context')[1:].underscore_separated()
293
- 'out_of_context'
294
-
295
- >>> WordSet.from_class_name(WordSet()).lowered().space_separated()
296
- 'word set'
297
-
298
- >>> example = WordSet.parse('figured it out')
299
- >>> example.headless_camel_case()
300
- 'figuredItOut'
301
- >>> example.dash_separated()
302
- 'figured-it-out'
303
-
304
- """
305
-
306
- _pattern = re.compile('([A-Z]?[a-z]+)|([A-Z]+(?![a-z]))')
307
-
308
- def capitalized(self):
309
- return WordSet(word.capitalize() for word in self)
310
-
311
- def lowered(self):
312
- return WordSet(word.lower() for word in self)
313
-
314
- def camel_case(self):
315
- return ''.join(self.capitalized())
316
-
317
- def headless_camel_case(self):
318
- words = iter(self)
319
- first = next(words).lower()
320
- new_words = itertools.chain((first,), WordSet(words).camel_case())
321
- return ''.join(new_words)
322
-
323
- def underscore_separated(self):
324
- return '_'.join(self)
325
-
326
- def dash_separated(self):
327
- return '-'.join(self)
328
-
329
- def space_separated(self):
330
- return ' '.join(self)
331
-
332
- def trim_right(self, item):
333
- """
334
- Remove the item from the end of the set.
335
-
336
- >>> WordSet.parse('foo bar').trim_right('foo')
337
- ('foo', 'bar')
338
- >>> WordSet.parse('foo bar').trim_right('bar')
339
- ('foo',)
340
- >>> WordSet.parse('').trim_right('bar')
341
- ()
342
- """
343
- return self[:-1] if self and self[-1] == item else self
344
-
345
- def trim_left(self, item):
346
- """
347
- Remove the item from the beginning of the set.
348
-
349
- >>> WordSet.parse('foo bar').trim_left('foo')
350
- ('bar',)
351
- >>> WordSet.parse('foo bar').trim_left('bar')
352
- ('foo', 'bar')
353
- >>> WordSet.parse('').trim_left('bar')
354
- ()
355
- """
356
- return self[1:] if self and self[0] == item else self
357
-
358
- def trim(self, item):
359
- """
360
- >>> WordSet.parse('foo bar').trim('foo')
361
- ('bar',)
362
- """
363
- return self.trim_left(item).trim_right(item)
364
-
365
- def __getitem__(self, item):
366
- result = super(WordSet, self).__getitem__(item)
367
- if isinstance(item, slice):
368
- result = WordSet(result)
369
- return result
370
-
371
- @classmethod
372
- def parse(cls, identifier):
373
- matches = cls._pattern.finditer(identifier)
374
- return WordSet(match.group(0) for match in matches)
375
-
376
- @classmethod
377
- def from_class_name(cls, subject):
378
- return cls.parse(subject.__class__.__name__)
379
-
380
-
381
- # for backward compatibility
382
- words = WordSet.parse
383
-
384
-
385
- def simple_html_strip(s):
386
- r"""
387
- Remove HTML from the string `s`.
388
-
389
- >>> str(simple_html_strip(''))
390
- ''
391
-
392
- >>> print(simple_html_strip('A <bold>stormy</bold> day in paradise'))
393
- A stormy day in paradise
394
-
395
- >>> print(simple_html_strip('Somebody <!-- do not --> tell the truth.'))
396
- Somebody tell the truth.
397
-
398
- >>> print(simple_html_strip('What about<br/>\nmultiple lines?'))
399
- What about
400
- multiple lines?
401
- """
402
- html_stripper = re.compile('(<!--.*?-->)|(<[^>]*>)|([^<]+)', re.DOTALL)
403
- texts = (match.group(3) or '' for match in html_stripper.finditer(s))
404
- return ''.join(texts)
405
-
406
-
407
- class SeparatedValues(str):
408
- """
409
- A string separated by a separator. Overrides __iter__ for getting
410
- the values.
411
-
412
- >>> list(SeparatedValues('a,b,c'))
413
- ['a', 'b', 'c']
414
-
415
- Whitespace is stripped and empty values are discarded.
416
-
417
- >>> list(SeparatedValues(' a, b , c, '))
418
- ['a', 'b', 'c']
419
- """
420
-
421
- separator = ','
422
-
423
- def __iter__(self):
424
- parts = self.split(self.separator)
425
- return filter(None, (part.strip() for part in parts))
426
-
427
-
428
- class Stripper:
429
- r"""
430
- Given a series of lines, find the common prefix and strip it from them.
431
-
432
- >>> lines = [
433
- ... 'abcdefg\n',
434
- ... 'abc\n',
435
- ... 'abcde\n',
436
- ... ]
437
- >>> res = Stripper.strip_prefix(lines)
438
- >>> res.prefix
439
- 'abc'
440
- >>> list(res.lines)
441
- ['defg\n', '\n', 'de\n']
442
-
443
- If no prefix is common, nothing should be stripped.
444
-
445
- >>> lines = [
446
- ... 'abcd\n',
447
- ... '1234\n',
448
- ... ]
449
- >>> res = Stripper.strip_prefix(lines)
450
- >>> res.prefix = ''
451
- >>> list(res.lines)
452
- ['abcd\n', '1234\n']
453
- """
454
-
455
- def __init__(self, prefix, lines):
456
- self.prefix = prefix
457
- self.lines = map(self, lines)
458
-
459
- @classmethod
460
- def strip_prefix(cls, lines):
461
- prefix_lines, lines = itertools.tee(lines)
462
- prefix = functools.reduce(cls.common_prefix, prefix_lines)
463
- return cls(prefix, lines)
464
-
465
- def __call__(self, line):
466
- if not self.prefix:
467
- return line
468
- null, prefix, rest = line.partition(self.prefix)
469
- return rest
470
-
471
- @staticmethod
472
- def common_prefix(s1, s2):
473
- """
474
- Return the common prefix of two lines.
475
- """
476
- index = min(len(s1), len(s2))
477
- while s1[:index] != s2[:index]:
478
- index -= 1
479
- return s1[:index]
480
-
481
-
482
- def remove_prefix(text, prefix):
483
- """
484
- Remove the prefix from the text if it exists.
485
-
486
- >>> remove_prefix('underwhelming performance', 'underwhelming ')
487
- 'performance'
488
-
489
- >>> remove_prefix('something special', 'sample')
490
- 'something special'
491
- """
492
- null, prefix, rest = text.rpartition(prefix)
493
- return rest
494
-
495
-
496
- def remove_suffix(text, suffix):
497
- """
498
- Remove the suffix from the text if it exists.
499
-
500
- >>> remove_suffix('name.git', '.git')
501
- 'name'
502
-
503
- >>> remove_suffix('something special', 'sample')
504
- 'something special'
505
- """
506
- rest, suffix, null = text.partition(suffix)
507
- return rest
508
-
509
-
510
- def normalize_newlines(text):
511
- r"""
512
- Replace alternate newlines with the canonical newline.
513
-
514
- >>> normalize_newlines('Lorem Ipsum\u2029')
515
- 'Lorem Ipsum\n'
516
- >>> normalize_newlines('Lorem Ipsum\r\n')
517
- 'Lorem Ipsum\n'
518
- >>> normalize_newlines('Lorem Ipsum\x85')
519
- 'Lorem Ipsum\n'
520
- """
521
- newlines = ['\r\n', '\r', '\n', '\u0085', '\u2028', '\u2029']
522
- pattern = '|'.join(newlines)
523
- return re.sub(pattern, '\n', text)
524
-
525
-
526
- def _nonblank(str):
527
- return str and not str.startswith('#')
528
-
529
-
530
- @functools.singledispatch
531
- def yield_lines(iterable):
532
- r"""
533
- Yield valid lines of a string or iterable.
534
-
535
- >>> list(yield_lines(''))
536
- []
537
- >>> list(yield_lines(['foo', 'bar']))
538
- ['foo', 'bar']
539
- >>> list(yield_lines('foo\nbar'))
540
- ['foo', 'bar']
541
- >>> list(yield_lines('\nfoo\n#bar\nbaz #comment'))
542
- ['foo', 'baz #comment']
543
- >>> list(yield_lines(['foo\nbar', 'baz', 'bing\n\n\n']))
544
- ['foo', 'bar', 'baz', 'bing']
545
- """
546
- return itertools.chain.from_iterable(map(yield_lines, iterable))
547
-
548
-
549
- @yield_lines.register(str)
550
- def _(text):
551
- return filter(_nonblank, map(str.strip, text.splitlines()))
552
-
553
-
554
- def drop_comment(line):
555
- """
556
- Drop comments.
557
-
558
- >>> drop_comment('foo # bar')
559
- 'foo'
560
-
561
- A hash without a space may be in a URL.
562
-
563
- >>> drop_comment('http://example.com/foo#bar')
564
- 'http://example.com/foo#bar'
565
- """
566
- return line.partition(' #')[0]
567
-
568
-
569
- def join_continuation(lines):
570
- r"""
571
- Join lines continued by a trailing backslash.
572
-
573
- >>> list(join_continuation(['foo \\', 'bar', 'baz']))
574
- ['foobar', 'baz']
575
- >>> list(join_continuation(['foo \\', 'bar', 'baz']))
576
- ['foobar', 'baz']
577
- >>> list(join_continuation(['foo \\', 'bar \\', 'baz']))
578
- ['foobarbaz']
579
-
580
- Not sure why, but...
581
- The character preceeding the backslash is also elided.
582
-
583
- >>> list(join_continuation(['goo\\', 'dly']))
584
- ['godly']
585
-
586
- A terrible idea, but...
587
- If no line is available to continue, suppress the lines.
588
-
589
- >>> list(join_continuation(['foo', 'bar\\', 'baz\\']))
590
- ['foo']
591
- """
592
- lines = iter(lines)
593
- for item in lines:
594
- while item.endswith('\\'):
595
- try:
596
- item = item[:-2].strip() + next(lines)
597
- except StopIteration:
598
- return
599
- yield item
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Awiny/Image2Paragraph/models/grit_src/third_party/CenterNet2/detectron2/modeling/roi_heads/fast_rcnn.py DELETED
@@ -1,462 +0,0 @@
1
- # Copyright (c) Facebook, Inc. and its affiliates.
2
- import logging
3
- from typing import Dict, List, Tuple, Union
4
- import torch
5
- from torch import nn
6
- from torch.nn import functional as F
7
-
8
- from detectron2.config import configurable
9
- from detectron2.layers import ShapeSpec, batched_nms, cat, cross_entropy, nonzero_tuple
10
- from detectron2.modeling.box_regression import Box2BoxTransform, _dense_box_regression_loss
11
- from detectron2.structures import Boxes, Instances
12
- from detectron2.utils.events import get_event_storage
13
-
14
- __all__ = ["fast_rcnn_inference", "FastRCNNOutputLayers"]
15
-
16
-
17
- logger = logging.getLogger(__name__)
18
-
19
- """
20
- Shape shorthand in this module:
21
-
22
- N: number of images in the minibatch
23
- R: number of ROIs, combined over all images, in the minibatch
24
- Ri: number of ROIs in image i
25
- K: number of foreground classes. E.g.,there are 80 foreground classes in COCO.
26
-
27
- Naming convention:
28
-
29
- deltas: refers to the 4-d (dx, dy, dw, dh) deltas that parameterize the box2box
30
- transform (see :class:`box_regression.Box2BoxTransform`).
31
-
32
- pred_class_logits: predicted class scores in [-inf, +inf]; use
33
- softmax(pred_class_logits) to estimate P(class).
34
-
35
- gt_classes: ground-truth classification labels in [0, K], where [0, K) represent
36
- foreground object classes and K represents the background class.
37
-
38
- pred_proposal_deltas: predicted box2box transform deltas for transforming proposals
39
- to detection box predictions.
40
-
41
- gt_proposal_deltas: ground-truth box2box transform deltas
42
- """
43
-
44
-
45
- def fast_rcnn_inference(
46
- boxes: List[torch.Tensor],
47
- scores: List[torch.Tensor],
48
- image_shapes: List[Tuple[int, int]],
49
- score_thresh: float,
50
- nms_thresh: float,
51
- topk_per_image: int,
52
- ):
53
- """
54
- Call `fast_rcnn_inference_single_image` for all images.
55
-
56
- Args:
57
- boxes (list[Tensor]): A list of Tensors of predicted class-specific or class-agnostic
58
- boxes for each image. Element i has shape (Ri, K * 4) if doing
59
- class-specific regression, or (Ri, 4) if doing class-agnostic
60
- regression, where Ri is the number of predicted objects for image i.
61
- This is compatible with the output of :meth:`FastRCNNOutputLayers.predict_boxes`.
62
- scores (list[Tensor]): A list of Tensors of predicted class scores for each image.
63
- Element i has shape (Ri, K + 1), where Ri is the number of predicted objects
64
- for image i. Compatible with the output of :meth:`FastRCNNOutputLayers.predict_probs`.
65
- image_shapes (list[tuple]): A list of (width, height) tuples for each image in the batch.
66
- score_thresh (float): Only return detections with a confidence score exceeding this
67
- threshold.
68
- nms_thresh (float): The threshold to use for box non-maximum suppression. Value in [0, 1].
69
- topk_per_image (int): The number of top scoring detections to return. Set < 0 to return
70
- all detections.
71
-
72
- Returns:
73
- instances: (list[Instances]): A list of N instances, one for each image in the batch,
74
- that stores the topk most confidence detections.
75
- kept_indices: (list[Tensor]): A list of 1D tensor of length of N, each element indicates
76
- the corresponding boxes/scores index in [0, Ri) from the input, for image i.
77
- """
78
- result_per_image = [
79
- fast_rcnn_inference_single_image(
80
- boxes_per_image, scores_per_image, image_shape, score_thresh, nms_thresh, topk_per_image
81
- )
82
- for scores_per_image, boxes_per_image, image_shape in zip(scores, boxes, image_shapes)
83
- ]
84
- return [x[0] for x in result_per_image], [x[1] for x in result_per_image]
85
-
86
-
87
- def _log_classification_stats(pred_logits, gt_classes, prefix="fast_rcnn"):
88
- """
89
- Log the classification metrics to EventStorage.
90
-
91
- Args:
92
- pred_logits: Rx(K+1) logits. The last column is for background class.
93
- gt_classes: R labels
94
- """
95
- num_instances = gt_classes.numel()
96
- if num_instances == 0:
97
- return
98
- pred_classes = pred_logits.argmax(dim=1)
99
- bg_class_ind = pred_logits.shape[1] - 1
100
-
101
- fg_inds = (gt_classes >= 0) & (gt_classes < bg_class_ind)
102
- num_fg = fg_inds.nonzero().numel()
103
- fg_gt_classes = gt_classes[fg_inds]
104
- fg_pred_classes = pred_classes[fg_inds]
105
-
106
- num_false_negative = (fg_pred_classes == bg_class_ind).nonzero().numel()
107
- num_accurate = (pred_classes == gt_classes).nonzero().numel()
108
- fg_num_accurate = (fg_pred_classes == fg_gt_classes).nonzero().numel()
109
-
110
- storage = get_event_storage()
111
- storage.put_scalar(f"{prefix}/cls_accuracy", num_accurate / num_instances)
112
- if num_fg > 0:
113
- storage.put_scalar(f"{prefix}/fg_cls_accuracy", fg_num_accurate / num_fg)
114
- storage.put_scalar(f"{prefix}/false_negative", num_false_negative / num_fg)
115
-
116
-
117
- def fast_rcnn_inference_single_image(
118
- boxes,
119
- scores,
120
- image_shape: Tuple[int, int],
121
- score_thresh: float,
122
- nms_thresh: float,
123
- topk_per_image: int,
124
- ):
125
- """
126
- Single-image inference. Return bounding-box detection results by thresholding
127
- on scores and applying non-maximum suppression (NMS).
128
-
129
- Args:
130
- Same as `fast_rcnn_inference`, but with boxes, scores, and image shapes
131
- per image.
132
-
133
- Returns:
134
- Same as `fast_rcnn_inference`, but for only one image.
135
- """
136
- valid_mask = torch.isfinite(boxes).all(dim=1) & torch.isfinite(scores).all(dim=1)
137
- if not valid_mask.all():
138
- boxes = boxes[valid_mask]
139
- scores = scores[valid_mask]
140
-
141
- scores = scores[:, :-1]
142
- num_bbox_reg_classes = boxes.shape[1] // 4
143
- # Convert to Boxes to use the `clip` function ...
144
- boxes = Boxes(boxes.reshape(-1, 4))
145
- boxes.clip(image_shape)
146
- boxes = boxes.tensor.view(-1, num_bbox_reg_classes, 4) # R x C x 4
147
-
148
- # 1. Filter results based on detection scores. It can make NMS more efficient
149
- # by filtering out low-confidence detections.
150
- filter_mask = scores > score_thresh # R x K
151
- # R' x 2. First column contains indices of the R predictions;
152
- # Second column contains indices of classes.
153
- filter_inds = filter_mask.nonzero()
154
- if num_bbox_reg_classes == 1:
155
- boxes = boxes[filter_inds[:, 0], 0]
156
- else:
157
- boxes = boxes[filter_mask]
158
- scores = scores[filter_mask]
159
-
160
- # 2. Apply NMS for each class independently.
161
- keep = batched_nms(boxes, scores, filter_inds[:, 1], nms_thresh)
162
- if topk_per_image >= 0:
163
- keep = keep[:topk_per_image]
164
- boxes, scores, filter_inds = boxes[keep], scores[keep], filter_inds[keep]
165
-
166
- result = Instances(image_shape)
167
- result.pred_boxes = Boxes(boxes)
168
- result.scores = scores
169
- result.pred_classes = filter_inds[:, 1]
170
- return result, filter_inds[:, 0]
171
-
172
-
173
- class FastRCNNOutputLayers(nn.Module):
174
- """
175
- Two linear layers for predicting Fast R-CNN outputs:
176
-
177
- 1. proposal-to-detection box regression deltas
178
- 2. classification scores
179
- """
180
-
181
- @configurable
182
- def __init__(
183
- self,
184
- input_shape: ShapeSpec,
185
- *,
186
- box2box_transform,
187
- num_classes: int,
188
- test_score_thresh: float = 0.0,
189
- test_nms_thresh: float = 0.5,
190
- test_topk_per_image: int = 100,
191
- cls_agnostic_bbox_reg: bool = False,
192
- smooth_l1_beta: float = 0.0,
193
- box_reg_loss_type: str = "smooth_l1",
194
- loss_weight: Union[float, Dict[str, float]] = 1.0,
195
- ):
196
- """
197
- NOTE: this interface is experimental.
198
-
199
- Args:
200
- input_shape (ShapeSpec): shape of the input feature to this module
201
- box2box_transform (Box2BoxTransform or Box2BoxTransformRotated):
202
- num_classes (int): number of foreground classes
203
- test_score_thresh (float): threshold to filter predictions results.
204
- test_nms_thresh (float): NMS threshold for prediction results.
205
- test_topk_per_image (int): number of top predictions to produce per image.
206
- cls_agnostic_bbox_reg (bool): whether to use class agnostic for bbox regression
207
- smooth_l1_beta (float): transition point from L1 to L2 loss. Only used if
208
- `box_reg_loss_type` is "smooth_l1"
209
- box_reg_loss_type (str): Box regression loss type. One of: "smooth_l1", "giou",
210
- "diou", "ciou"
211
- loss_weight (float|dict): weights to use for losses. Can be single float for weighting
212
- all losses, or a dict of individual weightings. Valid dict keys are:
213
- * "loss_cls": applied to classification loss
214
- * "loss_box_reg": applied to box regression loss
215
- """
216
- super().__init__()
217
- if isinstance(input_shape, int): # some backward compatibility
218
- input_shape = ShapeSpec(channels=input_shape)
219
- self.num_classes = num_classes
220
- input_size = input_shape.channels * (input_shape.width or 1) * (input_shape.height or 1)
221
- # prediction layer for num_classes foreground classes and one background class (hence + 1)
222
- self.cls_score = nn.Linear(input_size, num_classes + 1)
223
- num_bbox_reg_classes = 1 if cls_agnostic_bbox_reg else num_classes
224
- box_dim = len(box2box_transform.weights)
225
- self.bbox_pred = nn.Linear(input_size, num_bbox_reg_classes * box_dim)
226
-
227
- nn.init.normal_(self.cls_score.weight, std=0.01)
228
- nn.init.normal_(self.bbox_pred.weight, std=0.001)
229
- for l in [self.cls_score, self.bbox_pred]:
230
- nn.init.constant_(l.bias, 0)
231
-
232
- self.box2box_transform = box2box_transform
233
- self.smooth_l1_beta = smooth_l1_beta
234
- self.test_score_thresh = test_score_thresh
235
- self.test_nms_thresh = test_nms_thresh
236
- self.test_topk_per_image = test_topk_per_image
237
- self.box_reg_loss_type = box_reg_loss_type
238
- if isinstance(loss_weight, float):
239
- loss_weight = {"loss_cls": loss_weight, "loss_box_reg": loss_weight}
240
- self.loss_weight = loss_weight
241
-
242
- @classmethod
243
- def from_config(cls, cfg, input_shape):
244
- return {
245
- "input_shape": input_shape,
246
- "box2box_transform": Box2BoxTransform(weights=cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_WEIGHTS),
247
- # fmt: off
248
- "num_classes" : cfg.MODEL.ROI_HEADS.NUM_CLASSES,
249
- "cls_agnostic_bbox_reg" : cfg.MODEL.ROI_BOX_HEAD.CLS_AGNOSTIC_BBOX_REG,
250
- "smooth_l1_beta" : cfg.MODEL.ROI_BOX_HEAD.SMOOTH_L1_BETA,
251
- "test_score_thresh" : cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST,
252
- "test_nms_thresh" : cfg.MODEL.ROI_HEADS.NMS_THRESH_TEST,
253
- "test_topk_per_image" : cfg.TEST.DETECTIONS_PER_IMAGE,
254
- "box_reg_loss_type" : cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_TYPE,
255
- "loss_weight" : {"loss_box_reg": cfg.MODEL.ROI_BOX_HEAD.BBOX_REG_LOSS_WEIGHT},
256
- # fmt: on
257
- }
258
-
259
- def forward(self, x):
260
- """
261
- Args:
262
- x: per-region features of shape (N, ...) for N bounding boxes to predict.
263
-
264
- Returns:
265
- (Tensor, Tensor):
266
- First tensor: shape (N,K+1), scores for each of the N box. Each row contains the
267
- scores for K object categories and 1 background class.
268
-
269
- Second tensor: bounding box regression deltas for each box. Shape is shape (N,Kx4),
270
- or (N,4) for class-agnostic regression.
271
- """
272
- if x.dim() > 2:
273
- x = torch.flatten(x, start_dim=1)
274
- scores = self.cls_score(x)
275
- proposal_deltas = self.bbox_pred(x)
276
- return scores, proposal_deltas
277
-
278
- def losses(self, predictions, proposals):
279
- """
280
- Args:
281
- predictions: return values of :meth:`forward()`.
282
- proposals (list[Instances]): proposals that match the features that were used
283
- to compute predictions. The fields ``proposal_boxes``, ``gt_boxes``,
284
- ``gt_classes`` are expected.
285
-
286
- Returns:
287
- Dict[str, Tensor]: dict of losses
288
- """
289
- scores, proposal_deltas = predictions
290
-
291
- # parse classification outputs
292
- gt_classes = (
293
- cat([p.gt_classes for p in proposals], dim=0) if len(proposals) else torch.empty(0)
294
- )
295
- _log_classification_stats(scores, gt_classes)
296
-
297
- # parse box regression outputs
298
- if len(proposals):
299
- proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0) # Nx4
300
- assert not proposal_boxes.requires_grad, "Proposals should not require gradients!"
301
- # If "gt_boxes" does not exist, the proposals must be all negative and
302
- # should not be included in regression loss computation.
303
- # Here we just use proposal_boxes as an arbitrary placeholder because its
304
- # value won't be used in self.box_reg_loss().
305
- gt_boxes = cat(
306
- [(p.gt_boxes if p.has("gt_boxes") else p.proposal_boxes).tensor for p in proposals],
307
- dim=0,
308
- )
309
- else:
310
- proposal_boxes = gt_boxes = torch.empty((0, 4), device=proposal_deltas.device)
311
-
312
- losses = {
313
- "loss_cls": cross_entropy(scores, gt_classes, reduction="mean"),
314
- "loss_box_reg": self.box_reg_loss(
315
- proposal_boxes, gt_boxes, proposal_deltas, gt_classes
316
- ),
317
- }
318
- return {k: v * self.loss_weight.get(k, 1.0) for k, v in losses.items()}
319
-
320
- def box_reg_loss(self, proposal_boxes, gt_boxes, pred_deltas, gt_classes):
321
- """
322
- Args:
323
- proposal_boxes/gt_boxes are tensors with the same shape (R, 4 or 5).
324
- pred_deltas has shape (R, 4 or 5), or (R, num_classes * (4 or 5)).
325
- gt_classes is a long tensor of shape R, the gt class label of each proposal.
326
- R shall be the number of proposals.
327
- """
328
- box_dim = proposal_boxes.shape[1] # 4 or 5
329
- # Regression loss is only computed for foreground proposals (those matched to a GT)
330
- fg_inds = nonzero_tuple((gt_classes >= 0) & (gt_classes < self.num_classes))[0]
331
- if pred_deltas.shape[1] == box_dim: # cls-agnostic regression
332
- fg_pred_deltas = pred_deltas[fg_inds]
333
- else:
334
- fg_pred_deltas = pred_deltas.view(-1, self.num_classes, box_dim)[
335
- fg_inds, gt_classes[fg_inds]
336
- ]
337
-
338
- loss_box_reg = _dense_box_regression_loss(
339
- [proposal_boxes[fg_inds]],
340
- self.box2box_transform,
341
- [fg_pred_deltas.unsqueeze(0)],
342
- [gt_boxes[fg_inds]],
343
- ...,
344
- self.box_reg_loss_type,
345
- self.smooth_l1_beta,
346
- )
347
-
348
- # The reg loss is normalized using the total number of regions (R), not the number
349
- # of foreground regions even though the box regression loss is only defined on
350
- # foreground regions. Why? Because doing so gives equal training influence to
351
- # each foreground example. To see how, consider two different minibatches:
352
- # (1) Contains a single foreground region
353
- # (2) Contains 100 foreground regions
354
- # If we normalize by the number of foreground regions, the single example in
355
- # minibatch (1) will be given 100 times as much influence as each foreground
356
- # example in minibatch (2). Normalizing by the total number of regions, R,
357
- # means that the single example in minibatch (1) and each of the 100 examples
358
- # in minibatch (2) are given equal influence.
359
- return loss_box_reg / max(gt_classes.numel(), 1.0) # return 0 if empty
360
-
361
- def inference(self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances]):
362
- """
363
- Args:
364
- predictions: return values of :meth:`forward()`.
365
- proposals (list[Instances]): proposals that match the features that were
366
- used to compute predictions. The ``proposal_boxes`` field is expected.
367
-
368
- Returns:
369
- list[Instances]: same as `fast_rcnn_inference`.
370
- list[Tensor]: same as `fast_rcnn_inference`.
371
- """
372
- boxes = self.predict_boxes(predictions, proposals)
373
- scores = self.predict_probs(predictions, proposals)
374
- image_shapes = [x.image_size for x in proposals]
375
- return fast_rcnn_inference(
376
- boxes,
377
- scores,
378
- image_shapes,
379
- self.test_score_thresh,
380
- self.test_nms_thresh,
381
- self.test_topk_per_image,
382
- )
383
-
384
- def predict_boxes_for_gt_classes(self, predictions, proposals):
385
- """
386
- Args:
387
- predictions: return values of :meth:`forward()`.
388
- proposals (list[Instances]): proposals that match the features that were used
389
- to compute predictions. The fields ``proposal_boxes``, ``gt_classes`` are expected.
390
-
391
- Returns:
392
- list[Tensor]:
393
- A list of Tensors of predicted boxes for GT classes in case of
394
- class-specific box head. Element i of the list has shape (Ri, B), where Ri is
395
- the number of proposals for image i and B is the box dimension (4 or 5)
396
- """
397
- if not len(proposals):
398
- return []
399
- scores, proposal_deltas = predictions
400
- proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0)
401
- N, B = proposal_boxes.shape
402
- predict_boxes = self.box2box_transform.apply_deltas(
403
- proposal_deltas, proposal_boxes
404
- ) # Nx(KxB)
405
-
406
- K = predict_boxes.shape[1] // B
407
- if K > 1:
408
- gt_classes = torch.cat([p.gt_classes for p in proposals], dim=0)
409
- # Some proposals are ignored or have a background class. Their gt_classes
410
- # cannot be used as index.
411
- gt_classes = gt_classes.clamp_(0, K - 1)
412
-
413
- predict_boxes = predict_boxes.view(N, K, B)[
414
- torch.arange(N, dtype=torch.long, device=predict_boxes.device), gt_classes
415
- ]
416
- num_prop_per_image = [len(p) for p in proposals]
417
- return predict_boxes.split(num_prop_per_image)
418
-
419
- def predict_boxes(
420
- self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances]
421
- ):
422
- """
423
- Args:
424
- predictions: return values of :meth:`forward()`.
425
- proposals (list[Instances]): proposals that match the features that were
426
- used to compute predictions. The ``proposal_boxes`` field is expected.
427
-
428
- Returns:
429
- list[Tensor]:
430
- A list of Tensors of predicted class-specific or class-agnostic boxes
431
- for each image. Element i has shape (Ri, K * B) or (Ri, B), where Ri is
432
- the number of proposals for image i and B is the box dimension (4 or 5)
433
- """
434
- if not len(proposals):
435
- return []
436
- _, proposal_deltas = predictions
437
- num_prop_per_image = [len(p) for p in proposals]
438
- proposal_boxes = cat([p.proposal_boxes.tensor for p in proposals], dim=0)
439
- predict_boxes = self.box2box_transform.apply_deltas(
440
- proposal_deltas,
441
- proposal_boxes,
442
- ) # Nx(KxB)
443
- return predict_boxes.split(num_prop_per_image)
444
-
445
- def predict_probs(
446
- self, predictions: Tuple[torch.Tensor, torch.Tensor], proposals: List[Instances]
447
- ):
448
- """
449
- Args:
450
- predictions: return values of :meth:`forward()`.
451
- proposals (list[Instances]): proposals that match the features that were
452
- used to compute predictions.
453
-
454
- Returns:
455
- list[Tensor]:
456
- A list of Tensors of predicted class probabilities for each image.
457
- Element i has shape (Ri, K + 1), where Ri is the number of proposals for image i.
458
- """
459
- scores, _ = predictions
460
- num_inst_per_image = [len(p) for p in proposals]
461
- probs = F.softmax(scores, dim=-1)
462
- return probs.split(num_inst_per_image, dim=0)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/BAAI/vid2vid-zero/vid2vid_zero/p2p/seq_aligner.py DELETED
@@ -1,197 +0,0 @@
1
- # Copyright 2022 Google LLC
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
-
15
- import torch
16
- import numpy as np
17
-
18
-
19
- class ScoreParams:
20
-
21
- def __init__(self, gap, match, mismatch):
22
- self.gap = gap
23
- self.match = match
24
- self.mismatch = mismatch
25
-
26
- def mis_match_char(self, x, y):
27
- if x != y:
28
- return self.mismatch
29
- else:
30
- return self.match
31
-
32
-
33
- def get_matrix(size_x, size_y, gap):
34
- matrix = []
35
- for i in range(len(size_x) + 1):
36
- sub_matrix = []
37
- for j in range(len(size_y) + 1):
38
- sub_matrix.append(0)
39
- matrix.append(sub_matrix)
40
- for j in range(1, len(size_y) + 1):
41
- matrix[0][j] = j*gap
42
- for i in range(1, len(size_x) + 1):
43
- matrix[i][0] = i*gap
44
- return matrix
45
-
46
-
47
- def get_matrix(size_x, size_y, gap):
48
- matrix = np.zeros((size_x + 1, size_y + 1), dtype=np.int32)
49
- matrix[0, 1:] = (np.arange(size_y) + 1) * gap
50
- matrix[1:, 0] = (np.arange(size_x) + 1) * gap
51
- return matrix
52
-
53
-
54
- def get_traceback_matrix(size_x, size_y):
55
- matrix = np.zeros((size_x + 1, size_y +1), dtype=np.int32)
56
- matrix[0, 1:] = 1
57
- matrix[1:, 0] = 2
58
- matrix[0, 0] = 4
59
- return matrix
60
-
61
-
62
- def global_align(x, y, score):
63
- matrix = get_matrix(len(x), len(y), score.gap)
64
- trace_back = get_traceback_matrix(len(x), len(y))
65
- for i in range(1, len(x) + 1):
66
- for j in range(1, len(y) + 1):
67
- left = matrix[i, j - 1] + score.gap
68
- up = matrix[i - 1, j] + score.gap
69
- diag = matrix[i - 1, j - 1] + score.mis_match_char(x[i - 1], y[j - 1])
70
- matrix[i, j] = max(left, up, diag)
71
- if matrix[i, j] == left:
72
- trace_back[i, j] = 1
73
- elif matrix[i, j] == up:
74
- trace_back[i, j] = 2
75
- else:
76
- trace_back[i, j] = 3
77
- return matrix, trace_back
78
-
79
-
80
- def get_aligned_sequences(x, y, trace_back):
81
- x_seq = []
82
- y_seq = []
83
- i = len(x)
84
- j = len(y)
85
- mapper_y_to_x = []
86
- while i > 0 or j > 0:
87
- if trace_back[i, j] == 3:
88
- x_seq.append(x[i-1])
89
- y_seq.append(y[j-1])
90
- i = i-1
91
- j = j-1
92
- mapper_y_to_x.append((j, i))
93
- elif trace_back[i][j] == 1:
94
- x_seq.append('-')
95
- y_seq.append(y[j-1])
96
- j = j-1
97
- mapper_y_to_x.append((j, -1))
98
- elif trace_back[i][j] == 2:
99
- x_seq.append(x[i-1])
100
- y_seq.append('-')
101
- i = i-1
102
- elif trace_back[i][j] == 4:
103
- break
104
- mapper_y_to_x.reverse()
105
- return x_seq, y_seq, torch.tensor(mapper_y_to_x, dtype=torch.int64)
106
-
107
-
108
- def get_mapper(x: str, y: str, tokenizer, max_len=77):
109
- x_seq = tokenizer.encode(x)
110
- y_seq = tokenizer.encode(y)
111
- score = ScoreParams(0, 1, -1)
112
- matrix, trace_back = global_align(x_seq, y_seq, score)
113
- mapper_base = get_aligned_sequences(x_seq, y_seq, trace_back)[-1]
114
- alphas = torch.ones(max_len)
115
- alphas[: mapper_base.shape[0]] = mapper_base[:, 1].ne(-1).float()
116
- mapper = torch.zeros(max_len, dtype=torch.int64)
117
- mapper[:mapper_base.shape[0]] = mapper_base[:, 1]
118
- mapper[mapper_base.shape[0]:] = len(y_seq) + torch.arange(max_len - len(y_seq))
119
- return mapper, alphas
120
-
121
-
122
- def get_refinement_mapper(prompts, tokenizer, max_len=77):
123
- x_seq = prompts[0]
124
- mappers, alphas = [], []
125
- for i in range(1, len(prompts)):
126
- mapper, alpha = get_mapper(x_seq, prompts[i], tokenizer, max_len)
127
- mappers.append(mapper)
128
- alphas.append(alpha)
129
- return torch.stack(mappers), torch.stack(alphas)
130
-
131
-
132
- def get_word_inds(text: str, word_place: int, tokenizer):
133
- split_text = text.split(" ")
134
- if type(word_place) is str:
135
- word_place = [i for i, word in enumerate(split_text) if word_place == word]
136
- elif type(word_place) is int:
137
- word_place = [word_place]
138
- out = []
139
- if len(word_place) > 0:
140
- words_encode = [tokenizer.decode([item]).strip("#") for item in tokenizer.encode(text)][1:-1]
141
- cur_len, ptr = 0, 0
142
-
143
- for i in range(len(words_encode)):
144
- cur_len += len(words_encode[i])
145
- if ptr in word_place:
146
- out.append(i + 1)
147
- if cur_len >= len(split_text[ptr]):
148
- ptr += 1
149
- cur_len = 0
150
- return np.array(out)
151
-
152
-
153
- def get_replacement_mapper_(x: str, y: str, tokenizer, max_len=77):
154
- words_x = x.split(' ')
155
- words_y = y.split(' ')
156
- if len(words_x) != len(words_y):
157
- raise ValueError(f"attention replacement edit can only be applied on prompts with the same length"
158
- f" but prompt A has {len(words_x)} words and prompt B has {len(words_y)} words.")
159
- inds_replace = [i for i in range(len(words_y)) if words_y[i] != words_x[i]]
160
- inds_source = [get_word_inds(x, i, tokenizer) for i in inds_replace]
161
- inds_target = [get_word_inds(y, i, tokenizer) for i in inds_replace]
162
- mapper = np.zeros((max_len, max_len))
163
- i = j = 0
164
- cur_inds = 0
165
- while i < max_len and j < max_len:
166
- if cur_inds < len(inds_source) and inds_source[cur_inds][0] == i:
167
- inds_source_, inds_target_ = inds_source[cur_inds], inds_target[cur_inds]
168
- if len(inds_source_) == len(inds_target_):
169
- mapper[inds_source_, inds_target_] = 1
170
- else:
171
- ratio = 1 / len(inds_target_)
172
- for i_t in inds_target_:
173
- mapper[inds_source_, i_t] = ratio
174
- cur_inds += 1
175
- i += len(inds_source_)
176
- j += len(inds_target_)
177
- elif cur_inds < len(inds_source):
178
- mapper[i, j] = 1
179
- i += 1
180
- j += 1
181
- else:
182
- mapper[j, j] = 1
183
- i += 1
184
- j += 1
185
-
186
- return torch.from_numpy(mapper).float()
187
-
188
-
189
-
190
- def get_replacement_mapper(prompts, tokenizer, max_len=77):
191
- x_seq = prompts[0]
192
- mappers = []
193
- for i in range(1, len(prompts)):
194
- mapper = get_replacement_mapper_(x_seq, prompts[i], tokenizer, max_len)
195
- mappers.append(mapper)
196
- return torch.stack(mappers)
197
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Bala2-03-2003/MygenvioceAI/README.md DELETED
@@ -1,12 +0,0 @@
1
- ---
2
- title: MygenvioceAI
3
- emoji: 🏃
4
- colorFrom: blue
5
- colorTo: purple
6
- sdk: gradio
7
- sdk_version: 3.39.0
8
- app_file: app.py
9
- pinned: false
10
- ---
11
-
12
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/BasToTheMax/tensor/app.py DELETED
@@ -1,28 +0,0 @@
1
- import gradio as gr
2
- from stable_diffusion_tf.stable_diffusion import StableDiffusion
3
- from PIL import Image
4
-
5
- generator = StableDiffusion(
6
- img_height=512,
7
- img_width=512,
8
- jit_compile=True,
9
- )
10
-
11
-
12
- def gen(prompt):
13
- image = generator.generate(
14
- prompt,
15
- num_steps=50,
16
- unconditional_guidance_scale=7.5,
17
- temperature=1,
18
- batch_size=1,
19
- )
20
- return image[0]
21
-
22
- demo = gr.Interface(fn=gen, inputs="text", outputs=gr.Image(type="pil"))
23
-
24
- demo.launch()
25
-
26
- # prompt = "a photo of an astronaut riding a horse on mars"
27
- # image = pipe(prompt).images[0]
28
- # image.save("astronaut_rides_horse.png")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Benson/text-generation/Examples/Cmo Descargar Mis Monstruos Cantando.md DELETED
@@ -1,79 +0,0 @@
1
- <br />
2
- <h1>Cómo descargar mis monstruos cantando</h1>
3
- <p>¿Te gustan los juegos musicales? ¿Te gusta coleccionar y criar monstruos lindos y divertidos? ¿Quieres crear tu propia isla llena de criaturas cantantes? Si respondiste sí a cualquiera de estas preguntas, entonces definitivamente deberías descargar My Singing Monsters, un juego gratuito para dispositivos Android, iOS y Steam. En este artículo, te mostraremos cómo descargar My Singing Monsters en diferentes dispositivos y plataformas, así como algunos consejos y trucos para aprovechar al máximo tu experiencia con monstruos. </p>
4
- <h2>¿Cómo descargar mis monstruos cantando</h2><br /><p><b><b>Download</b> --->>> <a href="https://bltlly.com/2v6Jrs">https://bltlly.com/2v6Jrs</a></b></p><br /><br />
5
- <h2>Descarga desde Google Play Store</h2>
6
- <p>Si tienes un teléfono o tableta Android, la forma más fácil de descargar My Singing Monsters es desde Google Play Store. Estos son los pasos que debes seguir:</p>
7
- <ol>
8
- <li>Abra la aplicación Play Store en su dispositivo o vaya a <a href="( 1 )">play.google.com</a> en su navegador. </li>
9
- <li>Buscar "Mis monstruos cantando" o usar esto <a href="( 1 )">enlace directo</a>. </li>
10
- <li>Toque en el título de la aplicación y comprobar las calificaciones de estrellas, el número de descargas, y los comentarios para asegurarse de que es confiable y seguro. </li>
11
- <li>Toque en "Instalar" (para aplicaciones gratuitas) o el precio de la aplicación (para aplicaciones de pago) y aceptar los permisos. </li>
12
- <li>Espere a que la aplicación se descargue e instale en su dispositivo. </li>
13
- <li>¡Abre la aplicación y disfruta! </li>
14
- </ol>
15
- <h2>Descarga desde App Store</h2>
16
- <p>Si tienes un iPhone o iPad, puedes descargar My Singing Monsters desde la App Store. Estos son los pasos que debes seguir:</p>
17
- <ol>
18
- <li>Abra la aplicación App Store en su dispositivo o vaya a <a href="( 3 )">apps.apple.com</a> en su navegador. </li>
19
- <li>Buscar "Mis monstruos cantando" o usar esto <a href="( 3 )">enlace directo</a>. </li>
20
- <li>Toque en el título de la aplicación y comprobar las calificaciones de estrellas, el número de descargas, y los comentarios para asegurarse de que es confiable y seguro. </li>
21
- <li>Toque en "Obtener" (para aplicaciones gratuitas) o el precio de la aplicación (para aplicaciones de pago) e ingrese su contraseña de Apple ID o use Touch ID o Face ID.</li>
22
-
23
- <li>¡Abre la aplicación y disfruta! </li>
24
- </ol>
25
- <h2>Descargar desde Steam</h2>
26
- <p>Si tienes un PC o Mac, puedes descargar My Singing Monsters de Steam, una popular plataforma de juegos. Estos son los pasos que debes seguir:</p>
27
- <ol>
28
- <li>Descargue e instale Steam en su computadora desde <a href="( 7 )">store.steampowered.com</a>. </li>
29
- <li>Crea una cuenta de Steam o inicia sesión con la existente. </li>
30
- <li>Buscar "Mis monstruos cantando" o usar esto <a href="( 2 )">enlace directo</a>. </li>
31
- <li>Haga clic en "Jugar juego" (para juegos gratis) o "Añadir al carrito" (para juegos de pago) y siga las instrucciones. </li>
32
- <li>Espere a que el juego se descargue e instale en su computadora. </li>
33
- <li>Lanza Steam y abre el juego desde tu biblioteca. </li>
34
- <li>Disfruta! </li>
35
- </ol>
36
- <h2>Descarga desde otras fuentes</h2>
37
- <h3 <h3>Riesgos y precauciones</h3>
38
- <p>Si bien descargar aplicaciones de las fuentes oficiales suele ser seguro y fácil, es posible que desee descargar My Singing Monsters de otras fuentes por varias razones. Por ejemplo, es posible que tenga un dispositivo antiguo que no sea compatible con la última versión de la aplicación, o que desee acceder a algunas funciones que no están disponibles en su región. Sin embargo, descargar aplicaciones de fuentes desconocidas también puede plantear algunos riesgos, como:</p>
39
- <p></p>
40
- <ul>
41
- <li>Malware: Algunas aplicaciones pueden contener software malicioso que puede dañar su dispositivo o robar su información personal. </li>
42
- <li>Virus: Algunas aplicaciones pueden infectar su dispositivo con virus que pueden dañar sus archivos o ralentizar su rendimiento. </li>
43
- <li>Spyware: Algunas aplicaciones pueden monitorear su actividad o recopilar sus datos sin su consentimiento. </li>
44
- <li>Adware: Algunas aplicaciones pueden mostrar anuncios molestos o inapropiados en su dispositivo. </li>
45
- <li>Estafas: Algunas aplicaciones pueden engañar a pagar por algo que no es lo que esperaba o no vale la pena el precio. </li>
46
- </ul>
47
- <p>Para evitar estos riesgos, siempre debe tener cuidado y precaución al descargar aplicaciones de otras fuentes. Aquí hay algunas precauciones que puede tomar:</p>
48
- <ul>
49
-
50
- <li>Descargar aplicaciones solo desde sitios o plataformas confiables y verificados. </li>
51
- <li>Escanear la aplicación con un antivirus fiable o software anti-malware antes de instalarla. </li>
52
- <li> Lea los permisos y términos de servicio de la aplicación cuidadosamente y solo aceptarlos si está de acuerdo con ellos. </li>
53
- <li>Copia de seguridad de su dispositivo y datos regularmente en caso de que algo salga mal. </li>
54
- </ul>
55
- <h3>Cómo cargar los APK </h3>
56
- <p>Si quieres descargar My Singing Monsters desde una fuente distinta de Google Play Store, tendrás que cargar un archivo APK. APK significa Android Package Kit, y es el formato de archivo que Android utiliza para distribuir e instalar aplicaciones. Sideload significa instalar una aplicación desde una fuente distinta de la oficial. Estos son los pasos que debes seguir para cargar un archivo APK:</p>
57
- <ol>
58
- <li>Encuentra un sitio de buena reputación que ofrece archivos APK para mis monstruos cantando, como <a href="">apkpure.com</a> o <a href="">apkmonk.com</a>. </li>
59
- <li>Descargar el archivo APK a su dispositivo o transferirlo desde su computadora a través de un cable USB o Bluetooth.</li>
60
- <li>Habilita la opción de instalar aplicaciones de fuentes desconocidas en tu dispositivo. Puede hacer esto yendo a Configuración > Seguridad > Fuentes desconocidas y activando. </li>
61
- <li>Localice el archivo APK en su dispositivo utilizando una aplicación de administrador de archivos o la carpeta Descargas. </li>
62
- <li>Toque en el archivo APK y siga las instrucciones para instalarlo. </li>
63
- <li>¡Abre la aplicación y disfruta! </li>
64
- </ol>
65
- <h2>Conclusión</h2>
66
-
67
- <h2>Preguntas frecuentes</h2>
68
- <p>Aquí hay algunas preguntas y respuestas frecuentes sobre la descarga de My Singing Monsters:</p>
69
- <h4>Q: ¿Cuánto espacio ocupa My Singing Monsters en mi dispositivo? </h4>
70
- <p>A: El tamaño de My Singing Monsters varía según el dispositivo y la plataforma, pero suele ser de unos 100 MB. Sin embargo, podría requerir más espacio a medida que avanzas en el juego y desbloqueas más contenido. </p>
71
- <h4>Q: ¿Puedo jugar mis monstruos cantando offline? </h4>
72
- <p>A: No, necesitas una conexión a Internet para jugar a My Singing Monsters, ya que es un juego online que requiere una comunicación constante con los servidores. También necesita una conexión a Internet para acceder a algunas funciones, como interacciones sociales, almacenamiento en la nube y actualizaciones. </p>
73
- <h4>Q: ¿Puedo jugar mis monstruos cantando en múltiples dispositivos? </h4>
74
- <p>A: Sí, puedes jugar My Singing Monsters en varios dispositivos usando la misma cuenta. Solo necesita vincular su cuenta a una dirección de correo electrónico o una cuenta de Facebook y luego iniciar sesión con ella en cualquier dispositivo. También puede sincronizar su progreso entre dispositivos utilizando la función de almacenamiento en la nube. </p>
75
- <h4>Q: ¿Cómo puedo actualizar mis monstruos cantando? </h4>
76
- <p>A: Si has descargado My Singing Monsters de las fuentes oficiales, recibirás notificaciones cuando haya una actualización disponible para la aplicación. A continuación, puede actualizarlo desde la Play Store, la App Store o Steam, dependiendo de su dispositivo y plataforma. Si has descargado My Singing Monsters de otras fuentes, tendrás que comprobar el sitio donde lo conseguiste y descargar la última versión del archivo APK. A continuación, puede instalarlo sobre la aplicación existente sin perder sus datos. </p>
77
- <h4>Q: ¿Cómo puedo contactar a los desarrolladores de My Singing Monsters? </h4> 64aa2da5cf<br />
78
- <br />
79
- <br />
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_internal/utils/egg_link.py DELETED
@@ -1,72 +0,0 @@
1
- import os
2
- import re
3
- import sys
4
- from typing import List, Optional
5
-
6
- from pip._internal.locations import site_packages, user_site
7
- from pip._internal.utils.virtualenv import (
8
- running_under_virtualenv,
9
- virtualenv_no_global,
10
- )
11
-
12
- __all__ = [
13
- "egg_link_path_from_sys_path",
14
- "egg_link_path_from_location",
15
- ]
16
-
17
-
18
- def _egg_link_name(raw_name: str) -> str:
19
- """
20
- Convert a Name metadata value to a .egg-link name, by applying
21
- the same substitution as pkg_resources's safe_name function.
22
- Note: we cannot use canonicalize_name because it has a different logic.
23
- """
24
- return re.sub("[^A-Za-z0-9.]+", "-", raw_name) + ".egg-link"
25
-
26
-
27
- def egg_link_path_from_sys_path(raw_name: str) -> Optional[str]:
28
- """
29
- Look for a .egg-link file for project name, by walking sys.path.
30
- """
31
- egg_link_name = _egg_link_name(raw_name)
32
- for path_item in sys.path:
33
- egg_link = os.path.join(path_item, egg_link_name)
34
- if os.path.isfile(egg_link):
35
- return egg_link
36
- return None
37
-
38
-
39
- def egg_link_path_from_location(raw_name: str) -> Optional[str]:
40
- """
41
- Return the path for the .egg-link file if it exists, otherwise, None.
42
-
43
- There's 3 scenarios:
44
- 1) not in a virtualenv
45
- try to find in site.USER_SITE, then site_packages
46
- 2) in a no-global virtualenv
47
- try to find in site_packages
48
- 3) in a yes-global virtualenv
49
- try to find in site_packages, then site.USER_SITE
50
- (don't look in global location)
51
-
52
- For #1 and #3, there could be odd cases, where there's an egg-link in 2
53
- locations.
54
-
55
- This method will just return the first one found.
56
- """
57
- sites: List[str] = []
58
- if running_under_virtualenv():
59
- sites.append(site_packages)
60
- if not virtualenv_no_global() and user_site:
61
- sites.append(user_site)
62
- else:
63
- if user_site:
64
- sites.append(user_site)
65
- sites.append(site_packages)
66
-
67
- egg_link_name = _egg_link_name(raw_name)
68
- for site in sites:
69
- egglink = os.path.join(site, egg_link_name)
70
- if os.path.isfile(egglink):
71
- return egglink
72
- return None
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/LIVE/diffvg.cpp DELETED
@@ -1,1792 +0,0 @@
1
- #include "diffvg.h"
2
- #include "aabb.h"
3
- #include "shape.h"
4
- #include "sample_boundary.h"
5
- #include "atomic.h"
6
- #include "cdf.h"
7
- #include "compute_distance.h"
8
- #include "cuda_utils.h"
9
- #include "edge_query.h"
10
- #include "filter.h"
11
- #include "matrix.h"
12
- #include "parallel.h"
13
- #include "pcg.h"
14
- #include "ptr.h"
15
- #include "scene.h"
16
- #include "vector.h"
17
- #include "winding_number.h"
18
- #include "within_distance.h"
19
- #include <cassert>
20
- #include <pybind11/pybind11.h>
21
- #include <pybind11/stl.h>
22
- #include <thrust/execution_policy.h>
23
- #include <thrust/sort.h>
24
-
25
- namespace py = pybind11;
26
-
27
- struct Command {
28
- int shape_group_id;
29
- int shape_id;
30
- int point_id; // Only used by path
31
- };
32
-
33
- DEVICE
34
- bool is_inside(const SceneData &scene_data,
35
- int shape_group_id,
36
- const Vector2f &pt,
37
- EdgeQuery *edge_query) {
38
- const ShapeGroup &shape_group = scene_data.shape_groups[shape_group_id];
39
- // pt is in canvas space, transform it to shape's local space
40
- auto local_pt = xform_pt(shape_group.canvas_to_shape, pt);
41
- const auto &bvh_nodes = scene_data.shape_groups_bvh_nodes[shape_group_id];
42
- const AABB &bbox = bvh_nodes[2 * shape_group.num_shapes - 2].box;
43
- if (!inside(bbox, local_pt)) {
44
- return false;
45
- }
46
- auto winding_number = 0;
47
- // Traverse the shape group BVH
48
- constexpr auto max_bvh_stack_size = 64;
49
- int bvh_stack[max_bvh_stack_size];
50
- auto stack_size = 0;
51
- bvh_stack[stack_size++] = 2 * shape_group.num_shapes - 2;
52
- while (stack_size > 0) {
53
- const BVHNode &node = bvh_nodes[bvh_stack[--stack_size]];
54
- if (node.child1 < 0) {
55
- // leaf
56
- auto shape_id = node.child0;
57
- auto w = compute_winding_number(
58
- scene_data.shapes[shape_id], scene_data.path_bvhs[shape_id], local_pt);
59
- winding_number += w;
60
- if (edge_query != nullptr) {
61
- if (edge_query->shape_group_id == shape_group_id &&
62
- edge_query->shape_id == shape_id) {
63
- if ((shape_group.use_even_odd_rule && abs(w) % 2 == 1) ||
64
- (!shape_group.use_even_odd_rule && w != 0)) {
65
- edge_query->hit = true;
66
- }
67
- }
68
- }
69
- } else {
70
- assert(node.child0 >= 0 && node.child1 >= 0);
71
- const AABB &b0 = bvh_nodes[node.child0].box;
72
- if (inside(b0, local_pt)) {
73
- bvh_stack[stack_size++] = node.child0;
74
- }
75
- const AABB &b1 = bvh_nodes[node.child1].box;
76
- if (inside(b1, local_pt)) {
77
- bvh_stack[stack_size++] = node.child1;
78
- }
79
- assert(stack_size <= max_bvh_stack_size);
80
- }
81
- }
82
- if (shape_group.use_even_odd_rule) {
83
- return abs(winding_number) % 2 == 1;
84
- } else {
85
- return winding_number != 0;
86
- }
87
- }
88
-
89
- DEVICE void accumulate_boundary_gradient(const Shape &shape,
90
- float contrib,
91
- float t,
92
- const Vector2f &normal,
93
- const BoundaryData &boundary_data,
94
- Shape &d_shape,
95
- const Matrix3x3f &shape_to_canvas,
96
- const Vector2f &local_boundary_pt,
97
- Matrix3x3f &d_shape_to_canvas) {
98
- assert(isfinite(contrib));
99
- assert(isfinite(normal));
100
- // According to Reynold transport theorem,
101
- // the Jacobian of the boundary integral is dot(velocity, normal),
102
- // where the velocity depends on the variable being differentiated with.
103
- if (boundary_data.is_stroke) {
104
- auto has_path_thickness = false;
105
- if (shape.type == ShapeType::Path) {
106
- const Path &path = *(const Path *)shape.ptr;
107
- has_path_thickness = path.thickness != nullptr;
108
- }
109
- // differentiate stroke width: velocity is the same as normal
110
- if (has_path_thickness) {
111
- Path *d_p = (Path*)d_shape.ptr;
112
- auto base_point_id = boundary_data.path.base_point_id;
113
- auto point_id = boundary_data.path.point_id;
114
- auto t = boundary_data.path.t;
115
- const Path &path = *(const Path *)shape.ptr;
116
- if (path.num_control_points[base_point_id] == 0) {
117
- // Straight line
118
- auto i0 = point_id;
119
- auto i1 = (point_id + 1) % path.num_points;
120
- // r = r0 + t * (r1 - r0)
121
- atomic_add(&d_p->thickness[i0], (1 - t) * contrib);
122
- atomic_add(&d_p->thickness[i1], ( t) * contrib);
123
- } else if (path.num_control_points[base_point_id] == 1) {
124
- // Quadratic Bezier curve
125
- auto i0 = point_id;
126
- auto i1 = point_id + 1;
127
- auto i2 = (point_id + 2) % path.num_points;
128
- // r = (1-t)^2r0 + 2(1-t)t r1 + t^2 r2
129
- atomic_add(&d_p->thickness[i0], square(1 - t) * contrib);
130
- atomic_add(&d_p->thickness[i1], (2*(1-t)*t) * contrib);
131
- atomic_add(&d_p->thickness[i2], (t*t) * contrib);
132
- } else if (path.num_control_points[base_point_id] == 2) {
133
- auto i0 = point_id;
134
- auto i1 = point_id + 1;
135
- auto i2 = point_id + 2;
136
- auto i3 = (point_id + 3) % path.num_points;
137
- // r = (1-t)^3r0 + 3*(1-t)^2tr1 + 3*(1-t)t^2r2 + t^3r3
138
- atomic_add(&d_p->thickness[i0], cubic(1 - t) * contrib);
139
- atomic_add(&d_p->thickness[i1], 3 * square(1 - t) * t * contrib);
140
- atomic_add(&d_p->thickness[i2], 3 * (1 - t) * t * t * contrib);
141
- atomic_add(&d_p->thickness[i3], t * t * t * contrib);
142
- } else {
143
- assert(false);
144
- }
145
- } else {
146
- atomic_add(&d_shape.stroke_width, contrib);
147
- }
148
- }
149
- switch (shape.type) {
150
- case ShapeType::Circle: {
151
- Circle *d_p = (Circle*)d_shape.ptr;
152
- // velocity for the center is (1, 0) for x and (0, 1) for y
153
- atomic_add(&d_p->center[0], normal * contrib);
154
- // velocity for the radius is the same as the normal
155
- atomic_add(&d_p->radius, contrib);
156
- break;
157
- } case ShapeType::Ellipse: {
158
- Ellipse *d_p = (Ellipse*)d_shape.ptr;
159
- // velocity for the center is (1, 0) for x and (0, 1) for y
160
- atomic_add(&d_p->center[0], normal * contrib);
161
- // velocity for the radius:
162
- // x = center.x + r.x * cos(2pi * t)
163
- // y = center.y + r.y * sin(2pi * t)
164
- // for r.x: (cos(2pi * t), 0)
165
- // for r.y: (0, sin(2pi * t))
166
- atomic_add(&d_p->radius.x, cos(2 * float(M_PI) * t) * normal.x * contrib);
167
- atomic_add(&d_p->radius.y, sin(2 * float(M_PI) * t) * normal.y * contrib);
168
- break;
169
- } case ShapeType::Path: {
170
- Path *d_p = (Path*)d_shape.ptr;
171
- auto base_point_id = boundary_data.path.base_point_id;
172
- auto point_id = boundary_data.path.point_id;
173
- auto t = boundary_data.path.t;
174
- const Path &path = *(const Path *)shape.ptr;
175
- if (path.num_control_points[base_point_id] == 0) {
176
- // Straight line
177
- auto i0 = point_id;
178
- auto i1 = (point_id + 1) % path.num_points;
179
- // pt = p0 + t * (p1 - p0)
180
- // velocity for p0.x: (1 - t, 0)
181
- // p0.y: ( 0, 1 - t)
182
- // p1.x: ( t, 0)
183
- // p1.y: ( 0, t)
184
- atomic_add(&d_p->points[2 * i0 + 0], (1 - t) * normal.x * contrib);
185
- atomic_add(&d_p->points[2 * i0 + 1], (1 - t) * normal.y * contrib);
186
- atomic_add(&d_p->points[2 * i1 + 0], ( t) * normal.x * contrib);
187
- atomic_add(&d_p->points[2 * i1 + 1], ( t) * normal.y * contrib);
188
- } else if (path.num_control_points[base_point_id] == 1) {
189
- // Quadratic Bezier curve
190
- auto i0 = point_id;
191
- auto i1 = point_id + 1;
192
- auto i2 = (point_id + 2) % path.num_points;
193
- // pt = (1-t)^2p0 + 2(1-t)t p1 + t^2 p2
194
- // velocity for p0.x: ((1-t)^2, 0)
195
- // p0.y: ( 0, (1-t)^2)
196
- // p1.x: (2(1-t)t, 0)
197
- // p1.y: ( 0, 2(1-t)t)
198
- // p1.x: ( t^2, 0)
199
- // p1.y: ( 0, t^2)
200
- atomic_add(&d_p->points[2 * i0 + 0], square(1 - t) * normal.x * contrib);
201
- atomic_add(&d_p->points[2 * i0 + 1], square(1 - t) * normal.y * contrib);
202
- atomic_add(&d_p->points[2 * i1 + 0], (2*(1-t)*t) * normal.x * contrib);
203
- atomic_add(&d_p->points[2 * i1 + 1], (2*(1-t)*t) * normal.y * contrib);
204
- atomic_add(&d_p->points[2 * i2 + 0], (t*t) * normal.x * contrib);
205
- atomic_add(&d_p->points[2 * i2 + 1], (t*t) * normal.y * contrib);
206
- } else if (path.num_control_points[base_point_id] == 2) {
207
- auto i0 = point_id;
208
- auto i1 = point_id + 1;
209
- auto i2 = point_id + 2;
210
- auto i3 = (point_id + 3) % path.num_points;
211
- // pt = (1-t)^3p0 + 3*(1-t)^2tp1 + 3*(1-t)t^2p2 + t^3p3
212
- // velocity for p0.x: ( (1-t)^3, 0)
213
- // p0.y: ( 0, (1-t)^3)
214
- // p1.x: (3*(1-t)^2t, 0)
215
- // p1.y: ( 0, 3*(1-t)^2t)
216
- // p2.x: (3*(1-t)t^2, 0)
217
- // p2.y: ( 0, 3*(1-t)t^2)
218
- // p2.x: ( t^3, 0)
219
- // p2.y: ( 0, t^3)
220
- atomic_add(&d_p->points[2 * i0 + 0], cubic(1 - t) * normal.x * contrib);
221
- atomic_add(&d_p->points[2 * i0 + 1], cubic(1 - t) * normal.y * contrib);
222
- atomic_add(&d_p->points[2 * i1 + 0], 3 * square(1 - t) * t * normal.x * contrib);
223
- atomic_add(&d_p->points[2 * i1 + 1], 3 * square(1 - t) * t * normal.y * contrib);
224
- atomic_add(&d_p->points[2 * i2 + 0], 3 * (1 - t) * t * t * normal.x * contrib);
225
- atomic_add(&d_p->points[2 * i2 + 1], 3 * (1 - t) * t * t * normal.y * contrib);
226
- atomic_add(&d_p->points[2 * i3 + 0], t * t * t * normal.x * contrib);
227
- atomic_add(&d_p->points[2 * i3 + 1], t * t * t * normal.y * contrib);
228
- } else {
229
- assert(false);
230
- }
231
- break;
232
- } case ShapeType::Rect: {
233
- Rect *d_p = (Rect*)d_shape.ptr;
234
- // The velocity depends on the position of the boundary
235
- if (normal == Vector2f{-1, 0}) {
236
- // left
237
- // velocity for p_min is (1, 0) for x and (0, 0) for y
238
- atomic_add(&d_p->p_min.x, -contrib);
239
- } else if (normal == Vector2f{1, 0}) {
240
- // right
241
- // velocity for p_max is (1, 0) for x and (0, 0) for y
242
- atomic_add(&d_p->p_max.x, contrib);
243
- } else if (normal == Vector2f{0, -1}) {
244
- // top
245
- // velocity for p_min is (0, 0) for x and (0, 1) for y
246
- atomic_add(&d_p->p_min.y, -contrib);
247
- } else if (normal == Vector2f{0, 1}) {
248
- // bottom
249
- // velocity for p_max is (0, 0) for x and (0, 1) for y
250
- atomic_add(&d_p->p_max.y, contrib);
251
- } else {
252
- // incorrect normal assignment?
253
- assert(false);
254
- }
255
- break;
256
- } default: {
257
- assert(false);
258
- break;
259
- }
260
- }
261
- // for shape_to_canvas we have the following relationship:
262
- // boundary_pt = xform_pt(shape_to_canvas, local_pt)
263
- // the velocity is the derivative of boundary_pt with respect to shape_to_canvas
264
- // we can use reverse-mode AD to compute the dot product of the velocity and the Jacobian
265
- // by passing the normal in d_xform_pt
266
- auto d_shape_to_canvas_ = Matrix3x3f();
267
- auto d_local_boundary_pt = Vector2f{0, 0};
268
- d_xform_pt(shape_to_canvas,
269
- local_boundary_pt,
270
- normal * contrib,
271
- d_shape_to_canvas_,
272
- d_local_boundary_pt);
273
- atomic_add(&d_shape_to_canvas(0, 0), d_shape_to_canvas_);
274
- }
275
-
276
- DEVICE
277
- Vector4f sample_color(const ColorType &color_type,
278
- void *color,
279
- const Vector2f &pt) {
280
- switch (color_type) {
281
- case ColorType::Constant: {
282
- auto c = (const Constant*)color;
283
- assert(isfinite(c->color));
284
- return c->color;
285
- } case ColorType::LinearGradient: {
286
- auto c = (const LinearGradient*)color;
287
- // Project pt to (c->begin, c->end)
288
- auto beg = c->begin;
289
- auto end = c->end;
290
- auto t = dot(pt - beg, end - beg) / max(dot(end - beg, end - beg), 1e-3f);
291
- // Find the correponding stop:
292
- if (t < c->stop_offsets[0]) {
293
- return Vector4f{c->stop_colors[0],
294
- c->stop_colors[1],
295
- c->stop_colors[2],
296
- c->stop_colors[3]};
297
- }
298
- for (int i = 0; i < c->num_stops - 1; i++) {
299
- auto offset_curr = c->stop_offsets[i];
300
- auto offset_next = c->stop_offsets[i + 1];
301
- assert(offset_next > offset_curr);
302
- if (t >= offset_curr && t < offset_next) {
303
- auto color_curr = Vector4f{
304
- c->stop_colors[4 * i + 0],
305
- c->stop_colors[4 * i + 1],
306
- c->stop_colors[4 * i + 2],
307
- c->stop_colors[4 * i + 3]};
308
- auto color_next = Vector4f{
309
- c->stop_colors[4 * (i + 1) + 0],
310
- c->stop_colors[4 * (i + 1) + 1],
311
- c->stop_colors[4 * (i + 1) + 2],
312
- c->stop_colors[4 * (i + 1) + 3]};
313
- auto tt = (t - offset_curr) / (offset_next - offset_curr);
314
- assert(isfinite(tt));
315
- assert(isfinite(color_curr));
316
- assert(isfinite(color_next));
317
- return color_curr * (1 - tt) + color_next * tt;
318
- }
319
- }
320
- return Vector4f{c->stop_colors[4 * (c->num_stops - 1) + 0],
321
- c->stop_colors[4 * (c->num_stops - 1) + 1],
322
- c->stop_colors[4 * (c->num_stops - 1) + 2],
323
- c->stop_colors[4 * (c->num_stops - 1) + 3]};
324
- } case ColorType::RadialGradient: {
325
- auto c = (const RadialGradient*)color;
326
- // Distance from pt to center
327
- auto offset = pt - c->center;
328
- auto normalized_offset = offset / c->radius;
329
- auto t = length(normalized_offset);
330
- // Find the correponding stop:
331
- if (t < c->stop_offsets[0]) {
332
- return Vector4f{c->stop_colors[0],
333
- c->stop_colors[1],
334
- c->stop_colors[2],
335
- c->stop_colors[3]};
336
- }
337
- for (int i = 0; i < c->num_stops - 1; i++) {
338
- auto offset_curr = c->stop_offsets[i];
339
- auto offset_next = c->stop_offsets[i + 1];
340
- assert(offset_next > offset_curr);
341
- if (t >= offset_curr && t < offset_next) {
342
- auto color_curr = Vector4f{
343
- c->stop_colors[4 * i + 0],
344
- c->stop_colors[4 * i + 1],
345
- c->stop_colors[4 * i + 2],
346
- c->stop_colors[4 * i + 3]};
347
- auto color_next = Vector4f{
348
- c->stop_colors[4 * (i + 1) + 0],
349
- c->stop_colors[4 * (i + 1) + 1],
350
- c->stop_colors[4 * (i + 1) + 2],
351
- c->stop_colors[4 * (i + 1) + 3]};
352
- auto tt = (t - offset_curr) / (offset_next - offset_curr);
353
- assert(isfinite(tt));
354
- assert(isfinite(color_curr));
355
- assert(isfinite(color_next));
356
- return color_curr * (1 - tt) + color_next * tt;
357
- }
358
- }
359
- return Vector4f{c->stop_colors[4 * (c->num_stops - 1) + 0],
360
- c->stop_colors[4 * (c->num_stops - 1) + 1],
361
- c->stop_colors[4 * (c->num_stops - 1) + 2],
362
- c->stop_colors[4 * (c->num_stops - 1) + 3]};
363
- } default: {
364
- assert(false);
365
- }
366
- }
367
- return Vector4f{};
368
- }
369
-
370
- DEVICE
371
- void d_sample_color(const ColorType &color_type,
372
- void *color_ptr,
373
- const Vector2f &pt,
374
- const Vector4f &d_color,
375
- void *d_color_ptr,
376
- float *d_translation) {
377
- switch (color_type) {
378
- case ColorType::Constant: {
379
- auto d_c = (Constant*)d_color_ptr;
380
- atomic_add(&d_c->color[0], d_color);
381
- return;
382
- } case ColorType::LinearGradient: {
383
- auto c = (const LinearGradient*)color_ptr;
384
- auto d_c = (LinearGradient*)d_color_ptr;
385
- // Project pt to (c->begin, c->end)
386
- auto beg = c->begin;
387
- auto end = c->end;
388
- auto t = dot(pt - beg, end - beg) / max(dot(end - beg, end - beg), 1e-3f);
389
- // Find the correponding stop:
390
- if (t < c->stop_offsets[0]) {
391
- atomic_add(&d_c->stop_colors[0], d_color);
392
- return;
393
- }
394
- for (int i = 0; i < c->num_stops - 1; i++) {
395
- auto offset_curr = c->stop_offsets[i];
396
- auto offset_next = c->stop_offsets[i + 1];
397
- assert(offset_next > offset_curr);
398
- if (t >= offset_curr && t < offset_next) {
399
- auto color_curr = Vector4f{
400
- c->stop_colors[4 * i + 0],
401
- c->stop_colors[4 * i + 1],
402
- c->stop_colors[4 * i + 2],
403
- c->stop_colors[4 * i + 3]};
404
- auto color_next = Vector4f{
405
- c->stop_colors[4 * (i + 1) + 0],
406
- c->stop_colors[4 * (i + 1) + 1],
407
- c->stop_colors[4 * (i + 1) + 2],
408
- c->stop_colors[4 * (i + 1) + 3]};
409
- auto tt = (t - offset_curr) / (offset_next - offset_curr);
410
- // return color_curr * (1 - tt) + color_next * tt;
411
- auto d_color_curr = d_color * (1 - tt);
412
- auto d_color_next = d_color * tt;
413
- auto d_tt = sum(d_color * (color_next - color_curr));
414
- auto d_offset_next = -d_tt * tt / (offset_next - offset_curr);
415
- auto d_offset_curr = d_tt * ((tt - 1.f) / (offset_next - offset_curr));
416
- auto d_t = d_tt / (offset_next - offset_curr);
417
- assert(isfinite(d_tt));
418
- atomic_add(&d_c->stop_colors[4 * i], d_color_curr);
419
- atomic_add(&d_c->stop_colors[4 * (i + 1)], d_color_next);
420
- atomic_add(&d_c->stop_offsets[i], d_offset_curr);
421
- atomic_add(&d_c->stop_offsets[i + 1], d_offset_next);
422
- // auto t = dot(pt - beg, end - beg) / max(dot(end - beg, end - beg), 1e-6f);
423
- // l = max(dot(end - beg, end - beg), 1e-3f)
424
- // t = dot(pt - beg, end - beg) / l;
425
- auto l = max(dot(end - beg, end - beg), 1e-3f);
426
- auto d_beg = d_t * (-(pt - beg)-(end - beg)) / l;
427
- auto d_end = d_t * (pt - beg) / l;
428
- auto d_l = -d_t * t / l;
429
- if (dot(end - beg, end - beg) > 1e-3f) {
430
- d_beg += 2 * d_l * (beg - end);
431
- d_end += 2 * d_l * (end - beg);
432
- }
433
- atomic_add(&d_c->begin[0], d_beg);
434
- atomic_add(&d_c->end[0], d_end);
435
- if (d_translation != nullptr) {
436
- atomic_add(d_translation, (d_beg + d_end));
437
- }
438
- return;
439
- }
440
- }
441
- atomic_add(&d_c->stop_colors[4 * (c->num_stops - 1)], d_color);
442
- return;
443
- } case ColorType::RadialGradient: {
444
- auto c = (const RadialGradient*)color_ptr;
445
- auto d_c = (RadialGradient*)d_color_ptr;
446
- // Distance from pt to center
447
- auto offset = pt - c->center;
448
- auto normalized_offset = offset / c->radius;
449
- auto t = length(normalized_offset);
450
- // Find the correponding stop:
451
- if (t < c->stop_offsets[0]) {
452
- atomic_add(&d_c->stop_colors[0], d_color);
453
- return;
454
- }
455
- for (int i = 0; i < c->num_stops - 1; i++) {
456
- auto offset_curr = c->stop_offsets[i];
457
- auto offset_next = c->stop_offsets[i + 1];
458
- assert(offset_next > offset_curr);
459
- if (t >= offset_curr && t < offset_next) {
460
- auto color_curr = Vector4f{
461
- c->stop_colors[4 * i + 0],
462
- c->stop_colors[4 * i + 1],
463
- c->stop_colors[4 * i + 2],
464
- c->stop_colors[4 * i + 3]};
465
- auto color_next = Vector4f{
466
- c->stop_colors[4 * (i + 1) + 0],
467
- c->stop_colors[4 * (i + 1) + 1],
468
- c->stop_colors[4 * (i + 1) + 2],
469
- c->stop_colors[4 * (i + 1) + 3]};
470
- auto tt = (t - offset_curr) / (offset_next - offset_curr);
471
- assert(isfinite(tt));
472
- // return color_curr * (1 - tt) + color_next * tt;
473
- auto d_color_curr = d_color * (1 - tt);
474
- auto d_color_next = d_color * tt;
475
- auto d_tt = sum(d_color * (color_next - color_curr));
476
- auto d_offset_next = -d_tt * tt / (offset_next - offset_curr);
477
- auto d_offset_curr = d_tt * ((tt - 1.f) / (offset_next - offset_curr));
478
- auto d_t = d_tt / (offset_next - offset_curr);
479
- assert(isfinite(d_t));
480
- atomic_add(&d_c->stop_colors[4 * i], d_color_curr);
481
- atomic_add(&d_c->stop_colors[4 * (i + 1)], d_color_next);
482
- atomic_add(&d_c->stop_offsets[i], d_offset_curr);
483
- atomic_add(&d_c->stop_offsets[i + 1], d_offset_next);
484
- // offset = pt - c->center
485
- // normalized_offset = offset / c->radius
486
- // t = length(normalized_offset)
487
- auto d_normalized_offset = d_length(normalized_offset, d_t);
488
- auto d_offset = d_normalized_offset / c->radius;
489
- auto d_radius = -d_normalized_offset * offset / (c->radius * c->radius);
490
- auto d_center = -d_offset;
491
- atomic_add(&d_c->center[0], d_center);
492
- atomic_add(&d_c->radius[0], d_radius);
493
- if (d_translation != nullptr) {
494
- atomic_add(d_translation, d_center);
495
- }
496
- }
497
- }
498
- atomic_add(&d_c->stop_colors[4 * (c->num_stops - 1)], d_color);
499
- return;
500
- } default: {
501
- assert(false);
502
- }
503
- }
504
- }
505
-
506
- struct Fragment {
507
- Vector3f color;
508
- float alpha;
509
- int group_id;
510
- bool is_stroke;
511
- };
512
-
513
- struct PrefilterFragment {
514
- Vector3f color;
515
- float alpha;
516
- int group_id;
517
- bool is_stroke;
518
- int shape_id;
519
- float distance;
520
- Vector2f closest_pt;
521
- ClosestPointPathInfo path_info;
522
- bool within_distance;
523
- };
524
-
525
- DEVICE
526
- Vector4f sample_color(const SceneData &scene,
527
- const Vector4f *background_color,
528
- const Vector2f &screen_pt,
529
- const Vector4f *d_color = nullptr,
530
- EdgeQuery *edge_query = nullptr,
531
- Vector4f *d_background_color = nullptr,
532
- float *d_translation = nullptr) {
533
- if (edge_query != nullptr) {
534
- edge_query->hit = false;
535
- }
536
-
537
- // screen_pt is in screen space ([0, 1), [0, 1)),
538
- // need to transform to canvas space
539
- auto pt = screen_pt;
540
- pt.x *= scene.canvas_width;
541
- pt.y *= scene.canvas_height;
542
- constexpr auto max_hit_shapes = 256;
543
- constexpr auto max_bvh_stack_size = 64;
544
- Fragment fragments[max_hit_shapes];
545
- int bvh_stack[max_bvh_stack_size];
546
- auto stack_size = 0;
547
- auto num_fragments = 0;
548
- bvh_stack[stack_size++] = 2 * scene.num_shape_groups - 2;
549
- while (stack_size > 0) {
550
- const BVHNode &node = scene.bvh_nodes[bvh_stack[--stack_size]];
551
- if (node.child1 < 0) {
552
- // leaf
553
- auto group_id = node.child0;
554
- const ShapeGroup &shape_group = scene.shape_groups[group_id];
555
- if (shape_group.stroke_color != nullptr) {
556
- if (within_distance(scene, group_id, pt, edge_query)) {
557
- auto color_alpha = sample_color(shape_group.stroke_color_type,
558
- shape_group.stroke_color,
559
- pt);
560
- Fragment f;
561
- f.color = Vector3f{color_alpha[0], color_alpha[1], color_alpha[2]};
562
- f.alpha = color_alpha[3];
563
- f.group_id = group_id;
564
- f.is_stroke = true;
565
- assert(num_fragments < max_hit_shapes);
566
- fragments[num_fragments++] = f;
567
- }
568
- }
569
- if (shape_group.fill_color != nullptr) {
570
- if (is_inside(scene, group_id, pt, edge_query)) {
571
- auto color_alpha = sample_color(shape_group.fill_color_type,
572
- shape_group.fill_color,
573
- pt);
574
- Fragment f;
575
- f.color = Vector3f{color_alpha[0], color_alpha[1], color_alpha[2]};
576
- f.alpha = color_alpha[3];
577
- f.group_id = group_id;
578
- f.is_stroke = false;
579
- assert(num_fragments < max_hit_shapes);
580
- fragments[num_fragments++] = f;
581
- }
582
- }
583
- } else {
584
- assert(node.child0 >= 0 && node.child1 >= 0);
585
- const AABB &b0 = scene.bvh_nodes[node.child0].box;
586
- if (inside(b0, pt, scene.bvh_nodes[node.child0].max_radius)) {
587
- bvh_stack[stack_size++] = node.child0;
588
- }
589
- const AABB &b1 = scene.bvh_nodes[node.child1].box;
590
- if (inside(b1, pt, scene.bvh_nodes[node.child1].max_radius)) {
591
- bvh_stack[stack_size++] = node.child1;
592
- }
593
- assert(stack_size <= max_bvh_stack_size);
594
- }
595
- }
596
- if (num_fragments <= 0) {
597
- if (background_color != nullptr) {
598
- if (d_background_color != nullptr) {
599
- *d_background_color = *d_color;
600
- }
601
- return *background_color;
602
- }
603
- return Vector4f{0, 0, 0, 0};
604
- }
605
- // Sort the fragments from back to front (i.e. increasing order of group id)
606
- // https://github.com/frigaut/yorick-imutil/blob/master/insort.c#L37
607
- for (int i = 1; i < num_fragments; i++) {
608
- auto j = i;
609
- auto temp = fragments[j];
610
- while (j > 0 && fragments[j - 1].group_id > temp.group_id) {
611
- fragments[j] = fragments[j - 1];
612
- j--;
613
- }
614
- fragments[j] = temp;
615
- }
616
- // Blend the color
617
- Vector3f accum_color[max_hit_shapes];
618
- float accum_alpha[max_hit_shapes];
619
- // auto hit_opaque = false;
620
- auto first_alpha = 0.f;
621
- auto first_color = Vector3f{0, 0, 0};
622
- if (background_color != nullptr) {
623
- first_alpha = background_color->w;
624
- first_color = Vector3f{background_color->x,
625
- background_color->y,
626
- background_color->z};
627
- }
628
- for (int i = 0; i < num_fragments; i++) {
629
- const Fragment &fragment = fragments[i];
630
- auto new_color = fragment.color;
631
- auto new_alpha = fragment.alpha;
632
- auto prev_alpha = i > 0 ? accum_alpha[i - 1] : first_alpha;
633
- auto prev_color = i > 0 ? accum_color[i - 1] : first_color;
634
- if (edge_query != nullptr) {
635
- // Do we hit the target shape?
636
- if (new_alpha >= 1.f && edge_query->hit) {
637
- // A fully opaque shape in front of the target occludes it
638
- edge_query->hit = false;
639
- }
640
- if (edge_query->shape_group_id == fragment.group_id) {
641
- edge_query->hit = true;
642
- }
643
- }
644
- // prev_color is alpha premultiplied, don't need to multiply with
645
- // prev_alpha
646
- accum_color[i] = prev_color * (1 - new_alpha) + new_alpha * new_color;
647
- accum_alpha[i] = prev_alpha * (1 - new_alpha) + new_alpha;
648
- }
649
- auto final_color = accum_color[num_fragments - 1];
650
- auto final_alpha = accum_alpha[num_fragments - 1];
651
- if (final_alpha > 1e-6f) {
652
- final_color /= final_alpha;
653
- }
654
- assert(isfinite(final_color));
655
- assert(isfinite(final_alpha));
656
- if (d_color != nullptr) {
657
- // Backward pass
658
- auto d_final_color = Vector3f{(*d_color)[0], (*d_color)[1], (*d_color)[2]};
659
- auto d_final_alpha = (*d_color)[3];
660
- auto d_curr_color = d_final_color;
661
- auto d_curr_alpha = d_final_alpha;
662
- if (final_alpha > 1e-6f) {
663
- // final_color = curr_color / final_alpha
664
- d_curr_color = d_final_color / final_alpha;
665
- d_curr_alpha -= sum(d_final_color * final_color) / final_alpha;
666
- }
667
- assert(isfinite(*d_color));
668
- assert(isfinite(d_curr_color));
669
- assert(isfinite(d_curr_alpha));
670
- for (int i = num_fragments - 1; i >= 0; i--) {
671
- // color[n] = prev_color * (1 - new_alpha) + new_alpha * new_color;
672
- // alpha[n] = prev_alpha * (1 - new_alpha) + new_alpha;
673
- auto prev_alpha = i > 0 ? accum_alpha[i - 1] : first_alpha;
674
- auto prev_color = i > 0 ? accum_color[i - 1] : first_color;
675
- auto d_prev_alpha = d_curr_alpha * (1.f - fragments[i].alpha);
676
- auto d_alpha_i = d_curr_alpha * (1.f - prev_alpha);
677
- d_alpha_i += sum(d_curr_color * (fragments[i].color - prev_color));
678
- auto d_prev_color = d_curr_color * (1 - fragments[i].alpha);
679
- auto d_color_i = d_curr_color * fragments[i].alpha;
680
- auto group_id = fragments[i].group_id;
681
- if (fragments[i].is_stroke) {
682
- d_sample_color(scene.shape_groups[group_id].stroke_color_type,
683
- scene.shape_groups[group_id].stroke_color,
684
- pt,
685
- Vector4f{d_color_i[0], d_color_i[1], d_color_i[2], d_alpha_i},
686
- scene.d_shape_groups[group_id].stroke_color,
687
- d_translation);
688
- } else {
689
- d_sample_color(scene.shape_groups[group_id].fill_color_type,
690
- scene.shape_groups[group_id].fill_color,
691
- pt,
692
- Vector4f{d_color_i[0], d_color_i[1], d_color_i[2], d_alpha_i},
693
- scene.d_shape_groups[group_id].fill_color,
694
- d_translation);
695
- }
696
- d_curr_color = d_prev_color;
697
- d_curr_alpha = d_prev_alpha;
698
- }
699
- if (d_background_color != nullptr) {
700
- d_background_color->x += d_curr_color.x;
701
- d_background_color->y += d_curr_color.y;
702
- d_background_color->z += d_curr_color.z;
703
- d_background_color->w += d_curr_alpha;
704
- }
705
- }
706
- return Vector4f{final_color[0], final_color[1], final_color[2], final_alpha};
707
- }
708
-
709
- DEVICE
710
- float sample_distance(const SceneData &scene,
711
- const Vector2f &screen_pt,
712
- float weight,
713
- const float *d_dist = nullptr,
714
- float *d_translation = nullptr) {
715
- // screen_pt is in screen space ([0, 1), [0, 1)),
716
- // need to transform to canvas space
717
- auto pt = screen_pt;
718
- pt.x *= scene.canvas_width;
719
- pt.y *= scene.canvas_height;
720
- // for each shape
721
- auto min_group_id = -1;
722
- auto min_distance = 0.f;
723
- auto min_shape_id = -1;
724
- auto closest_pt = Vector2f{0, 0};
725
- auto min_path_info = ClosestPointPathInfo{-1, -1, 0};
726
- for (int group_id = scene.num_shape_groups - 1; group_id >= 0; group_id--) {
727
- auto s = -1;
728
- auto p = Vector2f{0, 0};
729
- ClosestPointPathInfo local_path_info;
730
- auto d = infinity<float>();
731
- if (compute_distance(scene, group_id, pt, infinity<float>(), &s, &p, &local_path_info, &d)) {
732
- if (min_group_id == -1 || d < min_distance) {
733
- min_distance = d;
734
- min_group_id = group_id;
735
- min_shape_id = s;
736
- closest_pt = p;
737
- min_path_info = local_path_info;
738
- }
739
- }
740
- }
741
- if (min_group_id == -1) {
742
- return min_distance;
743
- }
744
- min_distance *= weight;
745
- auto inside = false;
746
- const ShapeGroup &shape_group = scene.shape_groups[min_group_id];
747
- if (shape_group.fill_color != nullptr) {
748
- inside = is_inside(scene,
749
- min_group_id,
750
- pt,
751
- nullptr);
752
- if (inside) {
753
- min_distance = -min_distance;
754
- }
755
- }
756
- assert((min_group_id >= 0 && min_shape_id >= 0) || scene.num_shape_groups == 0);
757
- if (d_dist != nullptr) {
758
- auto d_abs_dist = inside ? -(*d_dist) : (*d_dist);
759
- const ShapeGroup &shape_group = scene.shape_groups[min_group_id];
760
- const Shape &shape = scene.shapes[min_shape_id];
761
- ShapeGroup &d_shape_group = scene.d_shape_groups[min_group_id];
762
- Shape &d_shape = scene.d_shapes[min_shape_id];
763
- d_compute_distance(shape_group.canvas_to_shape,
764
- shape_group.shape_to_canvas,
765
- shape,
766
- pt,
767
- closest_pt,
768
- min_path_info,
769
- d_abs_dist,
770
- d_shape_group.shape_to_canvas,
771
- d_shape,
772
- d_translation);
773
- }
774
- return min_distance;
775
- }
776
-
777
- // Gather d_color from d_image inside the filter kernel, normalize by
778
- // weight_image.
779
- DEVICE
780
- Vector4f gather_d_color(const Filter &filter,
781
- const float *d_color_image,
782
- const float *weight_image,
783
- int width,
784
- int height,
785
- const Vector2f &pt) {
786
- auto x = int(pt.x);
787
- auto y = int(pt.y);
788
- auto radius = filter.radius;
789
- assert(radius > 0);
790
- auto ri = (int)ceil(radius);
791
- auto d_color = Vector4f{0, 0, 0, 0};
792
- for (int dy = -ri; dy <= ri; dy++) {
793
- for (int dx = -ri; dx <= ri; dx++) {
794
- auto xx = x + dx;
795
- auto yy = y + dy;
796
- if (xx >= 0 && xx < width && yy >= 0 && yy < height) {
797
- auto xc = xx + 0.5f;
798
- auto yc = yy + 0.5f;
799
- auto filter_weight =
800
- compute_filter_weight(filter, xc - pt.x, yc - pt.y);
801
- // pixel = \sum weight * color / \sum weight
802
- auto weight_sum = weight_image[yy * width + xx];
803
- if (weight_sum > 0) {
804
- d_color += (filter_weight / weight_sum) * Vector4f{
805
- d_color_image[4 * (yy * width + xx) + 0],
806
- d_color_image[4 * (yy * width + xx) + 1],
807
- d_color_image[4 * (yy * width + xx) + 2],
808
- d_color_image[4 * (yy * width + xx) + 3],
809
- };
810
- }
811
- }
812
- }
813
- }
814
- return d_color;
815
- }
816
-
817
- DEVICE
818
- float smoothstep(float d) {
819
- auto t = clamp((d + 1.f) / 2.f, 0.f, 1.f);
820
- return t * t * (3 - 2 * t);
821
- }
822
-
823
- DEVICE
824
- float d_smoothstep(float d, float d_ret) {
825
- if (d < -1.f || d > 1.f) {
826
- return 0.f;
827
- }
828
- auto t = (d + 1.f) / 2.f;
829
- // ret = t * t * (3 - 2 * t)
830
- // = 3 * t * t - 2 * t * t * t
831
- auto d_t = d_ret * (6 * t - 6 * t * t);
832
- return d_t / 2.f;
833
- }
834
-
835
- DEVICE
836
- Vector4f sample_color_prefiltered(const SceneData &scene,
837
- const Vector4f *background_color,
838
- const Vector2f &screen_pt,
839
- const Vector4f *d_color = nullptr,
840
- Vector4f *d_background_color = nullptr,
841
- float *d_translation = nullptr) {
842
- // screen_pt is in screen space ([0, 1), [0, 1)),
843
- // need to transform to canvas space
844
- auto pt = screen_pt;
845
- pt.x *= scene.canvas_width;
846
- pt.y *= scene.canvas_height;
847
- constexpr auto max_hit_shapes = 64;
848
- constexpr auto max_bvh_stack_size = 64;
849
- PrefilterFragment fragments[max_hit_shapes];
850
- int bvh_stack[max_bvh_stack_size];
851
- auto stack_size = 0;
852
- auto num_fragments = 0;
853
- bvh_stack[stack_size++] = 2 * scene.num_shape_groups - 2;
854
- while (stack_size > 0) {
855
- const BVHNode &node = scene.bvh_nodes[bvh_stack[--stack_size]];
856
- if (node.child1 < 0) {
857
- // leaf
858
- auto group_id = node.child0;
859
- const ShapeGroup &shape_group = scene.shape_groups[group_id];
860
- if (shape_group.stroke_color != nullptr) {
861
- auto min_shape_id = -1;
862
- auto closest_pt = Vector2f{0, 0};
863
- auto local_path_info = ClosestPointPathInfo{-1, -1, 0};
864
- auto d = infinity<float>();
865
- compute_distance(scene, group_id, pt, infinity<float>(),
866
- &min_shape_id, &closest_pt, &local_path_info, &d);
867
- assert(min_shape_id != -1);
868
- const auto &shape = scene.shapes[min_shape_id];
869
- auto w = smoothstep(fabs(d) + shape.stroke_width) -
870
- smoothstep(fabs(d) - shape.stroke_width);
871
- if (w > 0) {
872
- auto color_alpha = sample_color(shape_group.stroke_color_type,
873
- shape_group.stroke_color,
874
- pt);
875
- color_alpha[3] *= w;
876
-
877
- PrefilterFragment f;
878
- f.color = Vector3f{color_alpha[0], color_alpha[1], color_alpha[2]};
879
- f.alpha = color_alpha[3];
880
- f.group_id = group_id;
881
- f.shape_id = min_shape_id;
882
- f.distance = d;
883
- f.closest_pt = closest_pt;
884
- f.is_stroke = true;
885
- f.path_info = local_path_info;
886
- f.within_distance = true;
887
- assert(num_fragments < max_hit_shapes);
888
- fragments[num_fragments++] = f;
889
- }
890
- }
891
- if (shape_group.fill_color != nullptr) {
892
- auto min_shape_id = -1;
893
- auto closest_pt = Vector2f{0, 0};
894
- auto local_path_info = ClosestPointPathInfo{-1, -1, 0};
895
- auto d = infinity<float>();
896
- auto found = compute_distance(scene,
897
- group_id,
898
- pt,
899
- 1.f,
900
- &min_shape_id,
901
- &closest_pt,
902
- &local_path_info,
903
- &d);
904
- auto inside = is_inside(scene, group_id, pt, nullptr);
905
- if (found || inside) {
906
- if (!inside) {
907
- d = -d;
908
- }
909
- auto w = smoothstep(d);
910
- if (w > 0) {
911
- auto color_alpha = sample_color(shape_group.fill_color_type,
912
- shape_group.fill_color,
913
- pt);
914
- color_alpha[3] *= w;
915
-
916
- PrefilterFragment f;
917
- f.color = Vector3f{color_alpha[0], color_alpha[1], color_alpha[2]};
918
- f.alpha = color_alpha[3];
919
- f.group_id = group_id;
920
- f.shape_id = min_shape_id;
921
- f.distance = d;
922
- f.closest_pt = closest_pt;
923
- f.is_stroke = false;
924
- f.path_info = local_path_info;
925
- f.within_distance = found;
926
- assert(num_fragments < max_hit_shapes);
927
- fragments[num_fragments++] = f;
928
- }
929
- }
930
- }
931
- } else {
932
- assert(node.child0 >= 0 && node.child1 >= 0);
933
- const AABB &b0 = scene.bvh_nodes[node.child0].box;
934
- if (inside(b0, pt, scene.bvh_nodes[node.child0].max_radius)) {
935
- bvh_stack[stack_size++] = node.child0;
936
- }
937
- const AABB &b1 = scene.bvh_nodes[node.child1].box;
938
- if (inside(b1, pt, scene.bvh_nodes[node.child1].max_radius)) {
939
- bvh_stack[stack_size++] = node.child1;
940
- }
941
- assert(stack_size <= max_bvh_stack_size);
942
- }
943
- }
944
- if (num_fragments <= 0) {
945
- if (background_color != nullptr) {
946
- if (d_background_color != nullptr) {
947
- *d_background_color = *d_color;
948
- }
949
- return *background_color;
950
- }
951
- return Vector4f{0, 0, 0, 0};
952
- }
953
- // Sort the fragments from back to front (i.e. increasing order of group id)
954
- // https://github.com/frigaut/yorick-imutil/blob/master/insort.c#L37
955
- for (int i = 1; i < num_fragments; i++) {
956
- auto j = i;
957
- auto temp = fragments[j];
958
- while (j > 0 && fragments[j - 1].group_id > temp.group_id) {
959
- fragments[j] = fragments[j - 1];
960
- j--;
961
- }
962
- fragments[j] = temp;
963
- }
964
- // Blend the color
965
- Vector3f accum_color[max_hit_shapes];
966
- float accum_alpha[max_hit_shapes];
967
- auto first_alpha = 0.f;
968
- auto first_color = Vector3f{0, 0, 0};
969
- if (background_color != nullptr) {
970
- first_alpha = background_color->w;
971
- first_color = Vector3f{background_color->x,
972
- background_color->y,
973
- background_color->z};
974
- }
975
- for (int i = 0; i < num_fragments; i++) {
976
- const PrefilterFragment &fragment = fragments[i];
977
- auto new_color = fragment.color;
978
- auto new_alpha = fragment.alpha;
979
- auto prev_alpha = i > 0 ? accum_alpha[i - 1] : first_alpha;
980
- auto prev_color = i > 0 ? accum_color[i - 1] : first_color;
981
- // prev_color is alpha premultiplied, don't need to multiply with
982
- // prev_alpha
983
- accum_color[i] = prev_color * (1 - new_alpha) + new_alpha * new_color;
984
- accum_alpha[i] = prev_alpha * (1 - new_alpha) + new_alpha;
985
- }
986
- auto final_color = accum_color[num_fragments - 1];
987
- auto final_alpha = accum_alpha[num_fragments - 1];
988
- if (final_alpha > 1e-6f) {
989
- final_color /= final_alpha;
990
- }
991
- assert(isfinite(final_color));
992
- assert(isfinite(final_alpha));
993
- if (d_color != nullptr) {
994
- // Backward pass
995
- auto d_final_color = Vector3f{(*d_color)[0], (*d_color)[1], (*d_color)[2]};
996
- auto d_final_alpha = (*d_color)[3];
997
- auto d_curr_color = d_final_color;
998
- auto d_curr_alpha = d_final_alpha;
999
- if (final_alpha > 1e-6f) {
1000
- // final_color = curr_color / final_alpha
1001
- d_curr_color = d_final_color / final_alpha;
1002
- d_curr_alpha -= sum(d_final_color * final_color) / final_alpha;
1003
- }
1004
- assert(isfinite(*d_color));
1005
- assert(isfinite(d_curr_color));
1006
- assert(isfinite(d_curr_alpha));
1007
- for (int i = num_fragments - 1; i >= 0; i--) {
1008
- // color[n] = prev_color * (1 - new_alpha) + new_alpha * new_color;
1009
- // alpha[n] = prev_alpha * (1 - new_alpha) + new_alpha;
1010
- auto prev_alpha = i > 0 ? accum_alpha[i - 1] : first_alpha;
1011
- auto prev_color = i > 0 ? accum_color[i - 1] : first_color;
1012
- auto d_prev_alpha = d_curr_alpha * (1.f - fragments[i].alpha);
1013
- auto d_alpha_i = d_curr_alpha * (1.f - prev_alpha);
1014
- d_alpha_i += sum(d_curr_color * (fragments[i].color - prev_color));
1015
- auto d_prev_color = d_curr_color * (1 - fragments[i].alpha);
1016
- auto d_color_i = d_curr_color * fragments[i].alpha;
1017
- auto group_id = fragments[i].group_id;
1018
- if (fragments[i].is_stroke) {
1019
- const auto &shape = scene.shapes[fragments[i].shape_id];
1020
- auto d = fragments[i].distance;
1021
- auto abs_d_plus_width = fabs(d) + shape.stroke_width;
1022
- auto abs_d_minus_width = fabs(d) - shape.stroke_width;
1023
- auto w = smoothstep(abs_d_plus_width) -
1024
- smoothstep(abs_d_minus_width);
1025
- if (w != 0) {
1026
- auto d_w = w > 0 ? (fragments[i].alpha / w) * d_alpha_i : 0.f;
1027
- d_alpha_i *= w;
1028
-
1029
- // Backprop to color
1030
- d_sample_color(scene.shape_groups[group_id].stroke_color_type,
1031
- scene.shape_groups[group_id].stroke_color,
1032
- pt,
1033
- Vector4f{d_color_i[0], d_color_i[1], d_color_i[2], d_alpha_i},
1034
- scene.d_shape_groups[group_id].stroke_color,
1035
- d_translation);
1036
-
1037
- auto d_abs_d_plus_width = d_smoothstep(abs_d_plus_width, d_w);
1038
- auto d_abs_d_minus_width = -d_smoothstep(abs_d_minus_width, d_w);
1039
-
1040
- auto d_d = d_abs_d_plus_width + d_abs_d_minus_width;
1041
- if (d < 0) {
1042
- d_d = -d_d;
1043
- }
1044
- auto d_stroke_width = d_abs_d_plus_width - d_abs_d_minus_width;
1045
-
1046
- const auto &shape_group = scene.shape_groups[group_id];
1047
- ShapeGroup &d_shape_group = scene.d_shape_groups[group_id];
1048
- Shape &d_shape = scene.d_shapes[fragments[i].shape_id];
1049
- if (fabs(d_d) > 1e-10f) {
1050
- d_compute_distance(shape_group.canvas_to_shape,
1051
- shape_group.shape_to_canvas,
1052
- shape,
1053
- pt,
1054
- fragments[i].closest_pt,
1055
- fragments[i].path_info,
1056
- d_d,
1057
- d_shape_group.shape_to_canvas,
1058
- d_shape,
1059
- d_translation);
1060
- }
1061
- atomic_add(&d_shape.stroke_width, d_stroke_width);
1062
- }
1063
- } else {
1064
- const auto &shape = scene.shapes[fragments[i].shape_id];
1065
- auto d = fragments[i].distance;
1066
- auto w = smoothstep(d);
1067
- if (w != 0) {
1068
- // color_alpha[3] = color_alpha[3] * w;
1069
- auto d_w = w > 0 ? (fragments[i].alpha / w) * d_alpha_i : 0.f;
1070
- d_alpha_i *= w;
1071
-
1072
- d_sample_color(scene.shape_groups[group_id].fill_color_type,
1073
- scene.shape_groups[group_id].fill_color,
1074
- pt,
1075
- Vector4f{d_color_i[0], d_color_i[1], d_color_i[2], d_alpha_i},
1076
- scene.d_shape_groups[group_id].fill_color,
1077
- d_translation);
1078
-
1079
- // w = smoothstep(d)
1080
- auto d_d = d_smoothstep(d, d_w);
1081
- if (d < 0) {
1082
- d_d = -d_d;
1083
- }
1084
-
1085
- const auto &shape_group = scene.shape_groups[group_id];
1086
- ShapeGroup &d_shape_group = scene.d_shape_groups[group_id];
1087
- Shape &d_shape = scene.d_shapes[fragments[i].shape_id];
1088
- if (fabs(d_d) > 1e-10f && fragments[i].within_distance) {
1089
- d_compute_distance(shape_group.canvas_to_shape,
1090
- shape_group.shape_to_canvas,
1091
- shape,
1092
- pt,
1093
- fragments[i].closest_pt,
1094
- fragments[i].path_info,
1095
- d_d,
1096
- d_shape_group.shape_to_canvas,
1097
- d_shape,
1098
- d_translation);
1099
- }
1100
- }
1101
- }
1102
- d_curr_color = d_prev_color;
1103
- d_curr_alpha = d_prev_alpha;
1104
- }
1105
- if (d_background_color != nullptr) {
1106
- d_background_color->x += d_curr_color.x;
1107
- d_background_color->y += d_curr_color.y;
1108
- d_background_color->z += d_curr_color.z;
1109
- d_background_color->w += d_curr_alpha;
1110
- }
1111
- }
1112
- return Vector4f{final_color[0], final_color[1], final_color[2], final_alpha};
1113
- }
1114
-
1115
- struct weight_kernel {
1116
- DEVICE void operator()(int idx) {
1117
- auto rng_state = init_pcg32(idx, seed);
1118
- // height * width * num_samples_y * num_samples_x
1119
- auto sx = idx % num_samples_x;
1120
- auto sy = (idx / num_samples_x) % num_samples_y;
1121
- auto x = (idx / (num_samples_x * num_samples_y)) % width;
1122
- auto y = (idx / (num_samples_x * num_samples_y * width));
1123
- assert(y < height);
1124
- auto rx = next_pcg32_float(&rng_state);
1125
- auto ry = next_pcg32_float(&rng_state);
1126
- if (use_prefiltering) {
1127
- rx = ry = 0.5f;
1128
- }
1129
- auto pt = Vector2f{x + ((float)sx + rx) / num_samples_x,
1130
- y + ((float)sy + ry) / num_samples_y};
1131
- auto radius = scene.filter->radius;
1132
- assert(radius >= 0);
1133
- auto ri = (int)ceil(radius);
1134
- for (int dy = -ri; dy <= ri; dy++) {
1135
- for (int dx = -ri; dx <= ri; dx++) {
1136
- auto xx = x + dx;
1137
- auto yy = y + dy;
1138
- if (xx >= 0 && xx < width && yy >= 0 && yy < height) {
1139
- auto xc = xx + 0.5f;
1140
- auto yc = yy + 0.5f;
1141
- auto filter_weight = compute_filter_weight(*scene.filter,
1142
- xc - pt.x,
1143
- yc - pt.y);
1144
- atomic_add(weight_image[yy * width + xx], filter_weight);
1145
- }
1146
- }
1147
- }
1148
- }
1149
-
1150
- SceneData scene;
1151
- float *weight_image;
1152
- int width;
1153
- int height;
1154
- int num_samples_x;
1155
- int num_samples_y;
1156
- uint64_t seed;
1157
- bool use_prefiltering;
1158
- };
1159
-
1160
- // We use a "mega kernel" for rendering
1161
- struct render_kernel {
1162
- DEVICE void operator()(int idx) {
1163
- // height * width * num_samples_y * num_samples_x
1164
- auto pt = Vector2f{0, 0};
1165
- auto x = 0;
1166
- auto y = 0;
1167
- if (eval_positions == nullptr) {
1168
- auto rng_state = init_pcg32(idx, seed);
1169
- auto sx = idx % num_samples_x;
1170
- auto sy = (idx / num_samples_x) % num_samples_y;
1171
- x = (idx / (num_samples_x * num_samples_y)) % width;
1172
- y = (idx / (num_samples_x * num_samples_y * width));
1173
- assert(x < width && y < height);
1174
- auto rx = next_pcg32_float(&rng_state);
1175
- auto ry = next_pcg32_float(&rng_state);
1176
- if (use_prefiltering) {
1177
- rx = ry = 0.5f;
1178
- }
1179
- pt = Vector2f{x + ((float)sx + rx) / num_samples_x,
1180
- y + ((float)sy + ry) / num_samples_y};
1181
- } else {
1182
- pt = Vector2f{eval_positions[2 * idx],
1183
- eval_positions[2 * idx + 1]};
1184
- x = int(pt.x);
1185
- y = int(pt.y);
1186
- }
1187
-
1188
- // normalize pt to [0, 1]
1189
- auto npt = pt;
1190
- npt.x /= width;
1191
- npt.y /= height;
1192
- auto num_samples = num_samples_x * num_samples_y;
1193
- if (render_image != nullptr || d_render_image != nullptr) {
1194
- Vector4f d_color = Vector4f{0, 0, 0, 0};
1195
- if (d_render_image != nullptr) {
1196
- // Gather d_color from d_render_image inside the filter kernel
1197
- // normalize using weight_image
1198
- d_color = gather_d_color(*scene.filter,
1199
- d_render_image,
1200
- weight_image,
1201
- width,
1202
- height,
1203
- pt);
1204
- }
1205
- auto color = Vector4f{0, 0, 0, 0};
1206
- if (use_prefiltering) {
1207
- color = sample_color_prefiltered(scene,
1208
- background_image != nullptr ? (const Vector4f*)&background_image[4 * ((y * width) + x)] : nullptr,
1209
- npt,
1210
- d_render_image != nullptr ? &d_color : nullptr,
1211
- d_background_image != nullptr ? (Vector4f*)&d_background_image[4 * ((y * width) + x)] : nullptr,
1212
- d_translation != nullptr ? &d_translation[2 * (y * width + x)] : nullptr);
1213
- } else {
1214
- color = sample_color(scene,
1215
- background_image != nullptr ? (const Vector4f*)&background_image[4 * ((y * width) + x)] : nullptr,
1216
- npt,
1217
- d_render_image != nullptr ? &d_color : nullptr,
1218
- nullptr,
1219
- d_background_image != nullptr ? (Vector4f*)&d_background_image[4 * ((y * width) + x)] : nullptr,
1220
- d_translation != nullptr ? &d_translation[2 * (y * width + x)] : nullptr);
1221
- }
1222
- assert(isfinite(color));
1223
- // Splat color onto render_image
1224
- auto radius = scene.filter->radius;
1225
- assert(radius >= 0);
1226
- auto ri = (int)ceil(radius);
1227
- for (int dy = -ri; dy <= ri; dy++) {
1228
- for (int dx = -ri; dx <= ri; dx++) {
1229
- auto xx = x + dx;
1230
- auto yy = y + dy;
1231
- if (xx >= 0 && xx < width && yy >= 0 && yy < height &&
1232
- weight_image[yy * width + xx] > 0) {
1233
- auto weight_sum = weight_image[yy * width + xx];
1234
- auto xc = xx + 0.5f;
1235
- auto yc = yy + 0.5f;
1236
- auto filter_weight = compute_filter_weight(*scene.filter,
1237
- xc - pt.x,
1238
- yc - pt.y);
1239
- auto weighted_color = filter_weight * color / weight_sum;
1240
- if (render_image != nullptr) {
1241
- atomic_add(render_image[4 * (yy * width + xx) + 0],
1242
- weighted_color[0]);
1243
- atomic_add(render_image[4 * (yy * width + xx) + 1],
1244
- weighted_color[1]);
1245
- atomic_add(render_image[4 * (yy * width + xx) + 2],
1246
- weighted_color[2]);
1247
- atomic_add(render_image[4 * (yy * width + xx) + 3],
1248
- weighted_color[3]);
1249
- }
1250
- if (d_render_image != nullptr) {
1251
- // Backprop to filter_weight
1252
- // pixel = \sum weight * color / \sum weight
1253
- auto d_pixel = Vector4f{
1254
- d_render_image[4 * (yy * width + xx) + 0],
1255
- d_render_image[4 * (yy * width + xx) + 1],
1256
- d_render_image[4 * (yy * width + xx) + 2],
1257
- d_render_image[4 * (yy * width + xx) + 3],
1258
- };
1259
- auto d_weight =
1260
- (dot(d_pixel, color) * weight_sum -
1261
- filter_weight * dot(d_pixel, color) * (weight_sum - filter_weight)) /
1262
- square(weight_sum);
1263
- d_compute_filter_weight(*scene.filter,
1264
- xc - pt.x,
1265
- yc - pt.y,
1266
- d_weight,
1267
- scene.d_filter);
1268
- }
1269
- }
1270
- }
1271
- }
1272
- }
1273
- if (sdf_image != nullptr || d_sdf_image != nullptr) {
1274
- float d_dist = 0.f;
1275
- if (d_sdf_image != nullptr) {
1276
- if (eval_positions == nullptr) {
1277
- d_dist = d_sdf_image[y * width + x];
1278
- } else {
1279
- d_dist = d_sdf_image[idx];
1280
- }
1281
- }
1282
- auto weight = eval_positions == nullptr ? 1.f / num_samples : 1.f;
1283
- auto dist = sample_distance(scene, npt, weight,
1284
- d_sdf_image != nullptr ? &d_dist : nullptr,
1285
- d_translation != nullptr ? &d_translation[2 * (y * width + x)] : nullptr);
1286
- if (sdf_image != nullptr) {
1287
- if (eval_positions == nullptr) {
1288
- atomic_add(sdf_image[y * width + x], dist);
1289
- } else {
1290
- atomic_add(sdf_image[idx], dist);
1291
- }
1292
- }
1293
- }
1294
- }
1295
-
1296
- SceneData scene;
1297
- float *background_image;
1298
- float *render_image;
1299
- float *weight_image;
1300
- float *sdf_image;
1301
- float *d_background_image;
1302
- float *d_render_image;
1303
- float *d_sdf_image;
1304
- float *d_translation;
1305
- int width;
1306
- int height;
1307
- int num_samples_x;
1308
- int num_samples_y;
1309
- uint64_t seed;
1310
- bool use_prefiltering;
1311
- float *eval_positions;
1312
- };
1313
-
1314
- struct BoundarySample {
1315
- Vector2f pt;
1316
- Vector2f local_pt;
1317
- Vector2f normal;
1318
- int shape_group_id;
1319
- int shape_id;
1320
- float t;
1321
- BoundaryData data;
1322
- float pdf;
1323
- };
1324
-
1325
- struct sample_boundary_kernel {
1326
- DEVICE void operator()(int idx) {
1327
- boundary_samples[idx].pt = Vector2f{0, 0};
1328
- boundary_samples[idx].shape_id = -1;
1329
- boundary_ids[idx] = idx;
1330
- morton_codes[idx] = 0;
1331
-
1332
- auto rng_state = init_pcg32(idx, seed);
1333
- auto u = next_pcg32_float(&rng_state);
1334
- // Sample a shape
1335
- auto sample_id = sample(scene.sample_shapes_cdf,
1336
- scene.num_total_shapes,
1337
- u);
1338
- assert(sample_id >= 0 && sample_id < scene.num_total_shapes);
1339
- auto shape_id = scene.sample_shape_id[sample_id];
1340
- assert(shape_id >= 0 && shape_id < scene.num_shapes);
1341
- auto shape_group_id = scene.sample_group_id[sample_id];
1342
- assert(shape_group_id >= 0 && shape_group_id < scene.num_shape_groups);
1343
- auto shape_pmf = scene.sample_shapes_pmf[shape_id];
1344
- if (shape_pmf <= 0) {
1345
- return;
1346
- }
1347
- // Sample a point on the boundary of the shape
1348
- auto boundary_pdf = 0.f;
1349
- auto normal = Vector2f{0, 0};
1350
- auto t = next_pcg32_float(&rng_state);
1351
- BoundaryData boundary_data;
1352
- const ShapeGroup &shape_group = scene.shape_groups[shape_group_id];
1353
- auto local_boundary_pt = sample_boundary(
1354
- scene, shape_group_id, shape_id,
1355
- t, normal, boundary_pdf, boundary_data);
1356
- if (boundary_pdf <= 0) {
1357
- return;
1358
- }
1359
-
1360
- // local_boundary_pt & normal are in shape's local space,
1361
- // transform them to canvas space
1362
- auto boundary_pt = xform_pt(shape_group.shape_to_canvas, local_boundary_pt);
1363
- normal = xform_normal(shape_group.canvas_to_shape, normal);
1364
- // Normalize boundary_pt to [0, 1)
1365
- boundary_pt.x /= scene.canvas_width;
1366
- boundary_pt.y /= scene.canvas_height;
1367
-
1368
- boundary_samples[idx].pt = boundary_pt;
1369
- boundary_samples[idx].local_pt = local_boundary_pt;
1370
- boundary_samples[idx].normal = normal;
1371
- boundary_samples[idx].shape_group_id = shape_group_id;
1372
- boundary_samples[idx].shape_id = shape_id;
1373
- boundary_samples[idx].t = t;
1374
- boundary_samples[idx].data = boundary_data;
1375
- boundary_samples[idx].pdf = shape_pmf * boundary_pdf;
1376
- TVector2<uint32_t> p_i{boundary_pt.x * 1023, boundary_pt.y * 1023};
1377
- morton_codes[idx] = (expand_bits(p_i.x) << 1u) |
1378
- (expand_bits(p_i.y) << 0u);
1379
- }
1380
-
1381
- SceneData scene;
1382
- uint64_t seed;
1383
- BoundarySample *boundary_samples;
1384
- int *boundary_ids;
1385
- uint32_t *morton_codes;
1386
- };
1387
-
1388
- struct render_edge_kernel {
1389
- DEVICE void operator()(int idx) {
1390
- auto bid = boundary_ids[idx];
1391
- if (boundary_samples[bid].shape_id == -1) {
1392
- return;
1393
- }
1394
- auto boundary_pt = boundary_samples[bid].pt;
1395
- auto local_boundary_pt = boundary_samples[bid].local_pt;
1396
- auto normal = boundary_samples[bid].normal;
1397
- auto shape_group_id = boundary_samples[bid].shape_group_id;
1398
- auto shape_id = boundary_samples[bid].shape_id;
1399
- auto t = boundary_samples[bid].t;
1400
- auto boundary_data = boundary_samples[bid].data;
1401
- auto pdf = boundary_samples[bid].pdf;
1402
-
1403
- const ShapeGroup &shape_group = scene.shape_groups[shape_group_id];
1404
-
1405
- auto bx = int(boundary_pt.x * width);
1406
- auto by = int(boundary_pt.y * height);
1407
- if (bx < 0 || bx >= width || by < 0 || by >= height) {
1408
- return;
1409
- }
1410
-
1411
- // Sample the two sides of the boundary
1412
- auto inside_query = EdgeQuery{shape_group_id, shape_id, false};
1413
- auto outside_query = EdgeQuery{shape_group_id, shape_id, false};
1414
- auto color_inside = sample_color(scene,
1415
- background_image != nullptr ? (const Vector4f *)&background_image[4 * ((by * width) + bx)] : nullptr,
1416
- boundary_pt - 1e-4f * normal,
1417
- nullptr, &inside_query);
1418
- auto color_outside = sample_color(scene,
1419
- background_image != nullptr ? (const Vector4f *)&background_image[4 * ((by * width) + bx)] : nullptr,
1420
- boundary_pt + 1e-4f * normal,
1421
- nullptr, &outside_query);
1422
- if (!inside_query.hit && !outside_query.hit) {
1423
- // occluded
1424
- return;
1425
- }
1426
- if (!inside_query.hit) {
1427
- normal = -normal;
1428
- swap_(inside_query, outside_query);
1429
- swap_(color_inside, color_outside);
1430
- }
1431
- // Boundary point in screen space
1432
- auto sboundary_pt = boundary_pt;
1433
- sboundary_pt.x *= width;
1434
- sboundary_pt.y *= height;
1435
- auto d_color = gather_d_color(*scene.filter,
1436
- d_render_image,
1437
- weight_image,
1438
- width,
1439
- height,
1440
- sboundary_pt);
1441
- // Normalization factor
1442
- d_color /= float(scene.canvas_width * scene.canvas_height);
1443
-
1444
- assert(isfinite(d_color));
1445
- assert(isfinite(pdf) && pdf > 0);
1446
- auto contrib = dot(color_inside - color_outside, d_color) / pdf;
1447
- ShapeGroup &d_shape_group = scene.d_shape_groups[shape_group_id];
1448
- accumulate_boundary_gradient(scene.shapes[shape_id],
1449
- contrib, t, normal, boundary_data, scene.d_shapes[shape_id],
1450
- shape_group.shape_to_canvas, local_boundary_pt, d_shape_group.shape_to_canvas);
1451
- // Don't need to backprop to filter weights:
1452
- // \int f'(x) g(x) dx doesn't contain discontinuities
1453
- // if f is continuous, even if g is discontinuous
1454
- if (d_translation != nullptr) {
1455
- // According to Reynold transport theorem,
1456
- // the Jacobian of the boundary integral is dot(velocity, normal)
1457
- // The velocity of the object translating x is (1, 0)
1458
- // The velocity of the object translating y is (0, 1)
1459
- atomic_add(&d_translation[2 * (by * width + bx) + 0], normal.x * contrib);
1460
- atomic_add(&d_translation[2 * (by * width + bx) + 1], normal.y * contrib);
1461
- }
1462
- }
1463
-
1464
- SceneData scene;
1465
- const float *background_image;
1466
- const BoundarySample *boundary_samples;
1467
- const int *boundary_ids;
1468
- float *weight_image;
1469
- float *d_render_image;
1470
- float *d_translation;
1471
- int width;
1472
- int height;
1473
- int num_samples_x;
1474
- int num_samples_y;
1475
- };
1476
-
1477
- void render(std::shared_ptr<Scene> scene,
1478
- ptr<float> background_image,
1479
- ptr<float> render_image,
1480
- ptr<float> render_sdf,
1481
- int width,
1482
- int height,
1483
- int num_samples_x,
1484
- int num_samples_y,
1485
- uint64_t seed,
1486
- ptr<float> d_background_image,
1487
- ptr<float> d_render_image,
1488
- ptr<float> d_render_sdf,
1489
- ptr<float> d_translation,
1490
- bool use_prefiltering,
1491
- ptr<float> eval_positions,
1492
- int num_eval_positions) {
1493
- #ifdef __NVCC__
1494
- int old_device_id = -1;
1495
- if (scene->use_gpu) {
1496
- checkCuda(cudaGetDevice(&old_device_id));
1497
- if (scene->gpu_index != -1) {
1498
- checkCuda(cudaSetDevice(scene->gpu_index));
1499
- }
1500
- }
1501
- #endif
1502
- parallel_init();
1503
-
1504
- float *weight_image = nullptr;
1505
- // Allocate and zero the weight image
1506
- if (scene->use_gpu) {
1507
- #ifdef __CUDACC__
1508
- if (eval_positions.get() == nullptr) {
1509
- checkCuda(cudaMallocManaged(&weight_image, width * height * sizeof(float)));
1510
- cudaMemset(weight_image, 0, width * height * sizeof(float));
1511
- }
1512
- #else
1513
- assert(false);
1514
- #endif
1515
- } else {
1516
- if (eval_positions.get() == nullptr) {
1517
- weight_image = (float*)malloc(width * height * sizeof(float));
1518
- memset(weight_image, 0, width * height * sizeof(float));
1519
- }
1520
- }
1521
-
1522
- if (render_image.get() != nullptr || d_render_image.get() != nullptr ||
1523
- render_sdf.get() != nullptr || d_render_sdf.get() != nullptr) {
1524
- if (weight_image != nullptr) {
1525
- parallel_for(weight_kernel{
1526
- get_scene_data(*scene.get()),
1527
- weight_image,
1528
- width,
1529
- height,
1530
- num_samples_x,
1531
- num_samples_y,
1532
- seed
1533
- }, width * height * num_samples_x * num_samples_y, scene->use_gpu);
1534
- }
1535
-
1536
- auto num_samples = eval_positions.get() == nullptr ?
1537
- width * height * num_samples_x * num_samples_y : num_eval_positions;
1538
- parallel_for(render_kernel{
1539
- get_scene_data(*scene.get()),
1540
- background_image.get(),
1541
- render_image.get(),
1542
- weight_image,
1543
- render_sdf.get(),
1544
- d_background_image.get(),
1545
- d_render_image.get(),
1546
- d_render_sdf.get(),
1547
- d_translation.get(),
1548
- width,
1549
- height,
1550
- num_samples_x,
1551
- num_samples_y,
1552
- seed,
1553
- use_prefiltering,
1554
- eval_positions.get()
1555
- }, num_samples, scene->use_gpu);
1556
- }
1557
-
1558
- // Boundary sampling
1559
- if (!use_prefiltering && d_render_image.get() != nullptr) {
1560
- auto num_samples = width * height * num_samples_x * num_samples_y;
1561
- BoundarySample *boundary_samples = nullptr;
1562
- int *boundary_ids = nullptr; // for sorting
1563
- uint32_t *morton_codes = nullptr; // for sorting
1564
- // Allocate boundary samples
1565
- if (scene->use_gpu) {
1566
- #ifdef __CUDACC__
1567
- checkCuda(cudaMallocManaged(&boundary_samples,
1568
- num_samples * sizeof(BoundarySample)));
1569
- checkCuda(cudaMallocManaged(&boundary_ids,
1570
- num_samples * sizeof(int)));
1571
- checkCuda(cudaMallocManaged(&morton_codes,
1572
- num_samples * sizeof(uint32_t)));
1573
- #else
1574
- assert(false);
1575
- #endif
1576
- } else {
1577
- boundary_samples = (BoundarySample*)malloc(
1578
- num_samples * sizeof(BoundarySample));
1579
- boundary_ids = (int*)malloc(
1580
- num_samples * sizeof(int));
1581
- morton_codes = (uint32_t*)malloc(
1582
- num_samples * sizeof(uint32_t));
1583
- }
1584
-
1585
- // Edge sampling
1586
- // We sort the boundary samples for better thread coherency
1587
- parallel_for(sample_boundary_kernel{
1588
- get_scene_data(*scene.get()),
1589
- seed,
1590
- boundary_samples,
1591
- boundary_ids,
1592
- morton_codes
1593
- }, num_samples, scene->use_gpu);
1594
- if (scene->use_gpu) {
1595
- thrust::sort_by_key(thrust::device, morton_codes, morton_codes + num_samples, boundary_ids);
1596
- } else {
1597
- // Don't need to sort for CPU, we are not using SIMD hardware anyway.
1598
- // thrust::sort_by_key(thrust::host, morton_codes, morton_codes + num_samples, boundary_ids);
1599
- }
1600
- parallel_for(render_edge_kernel{
1601
- get_scene_data(*scene.get()),
1602
- background_image.get(),
1603
- boundary_samples,
1604
- boundary_ids,
1605
- weight_image,
1606
- d_render_image.get(),
1607
- d_translation.get(),
1608
- width,
1609
- height,
1610
- num_samples_x,
1611
- num_samples_y
1612
- }, num_samples, scene->use_gpu);
1613
- if (scene->use_gpu) {
1614
- #ifdef __CUDACC__
1615
- checkCuda(cudaFree(boundary_samples));
1616
- checkCuda(cudaFree(boundary_ids));
1617
- checkCuda(cudaFree(morton_codes));
1618
- #else
1619
- assert(false);
1620
- #endif
1621
- } else {
1622
- free(boundary_samples);
1623
- free(boundary_ids);
1624
- free(morton_codes);
1625
- }
1626
- }
1627
-
1628
- // Clean up weight image
1629
- if (scene->use_gpu) {
1630
- #ifdef __CUDACC__
1631
- checkCuda(cudaFree(weight_image));
1632
- #else
1633
- assert(false);
1634
- #endif
1635
- } else {
1636
- free(weight_image);
1637
- }
1638
-
1639
- if (scene->use_gpu) {
1640
- cuda_synchronize();
1641
- }
1642
-
1643
- parallel_cleanup();
1644
- #ifdef __NVCC__
1645
- if (old_device_id != -1) {
1646
- checkCuda(cudaSetDevice(old_device_id));
1647
- }
1648
- #endif
1649
- }
1650
-
1651
- PYBIND11_MODULE(diffvg, m) {
1652
- m.doc() = "Differential Vector Graphics";
1653
-
1654
- py::class_<ptr<void>>(m, "void_ptr")
1655
- .def(py::init<std::size_t>())
1656
- .def("as_size_t", &ptr<void>::as_size_t);
1657
- py::class_<ptr<float>>(m, "float_ptr")
1658
- .def(py::init<std::size_t>());
1659
- py::class_<ptr<int>>(m, "int_ptr")
1660
- .def(py::init<std::size_t>());
1661
-
1662
- py::class_<Vector2f>(m, "Vector2f")
1663
- .def(py::init<float, float>())
1664
- .def_readwrite("x", &Vector2f::x)
1665
- .def_readwrite("y", &Vector2f::y);
1666
-
1667
- py::class_<Vector3f>(m, "Vector3f")
1668
- .def(py::init<float, float, float>())
1669
- .def_readwrite("x", &Vector3f::x)
1670
- .def_readwrite("y", &Vector3f::y)
1671
- .def_readwrite("z", &Vector3f::z);
1672
-
1673
- py::class_<Vector4f>(m, "Vector4f")
1674
- .def(py::init<float, float, float, float>())
1675
- .def_readwrite("x", &Vector4f::x)
1676
- .def_readwrite("y", &Vector4f::y)
1677
- .def_readwrite("z", &Vector4f::z)
1678
- .def_readwrite("w", &Vector4f::w);
1679
-
1680
- py::enum_<ShapeType>(m, "ShapeType")
1681
- .value("circle", ShapeType::Circle)
1682
- .value("ellipse", ShapeType::Ellipse)
1683
- .value("path", ShapeType::Path)
1684
- .value("rect", ShapeType::Rect);
1685
-
1686
- py::class_<Circle>(m, "Circle")
1687
- .def(py::init<float, Vector2f>())
1688
- .def("get_ptr", &Circle::get_ptr)
1689
- .def_readonly("radius", &Circle::radius)
1690
- .def_readonly("center", &Circle::center);
1691
-
1692
- py::class_<Ellipse>(m, "Ellipse")
1693
- .def(py::init<Vector2f, Vector2f>())
1694
- .def("get_ptr", &Ellipse::get_ptr)
1695
- .def_readonly("radius", &Ellipse::radius)
1696
- .def_readonly("center", &Ellipse::center);
1697
-
1698
- py::class_<Path>(m, "Path")
1699
- .def(py::init<ptr<int>, ptr<float>, ptr<float>, int, int, bool, bool>())
1700
- .def("get_ptr", &Path::get_ptr)
1701
- .def("has_thickness", &Path::has_thickness)
1702
- .def("copy_to", &Path::copy_to)
1703
- .def_readonly("num_points", &Path::num_points);
1704
-
1705
- py::class_<Rect>(m, "Rect")
1706
- .def(py::init<Vector2f, Vector2f>())
1707
- .def("get_ptr", &Rect::get_ptr)
1708
- .def_readonly("p_min", &Rect::p_min)
1709
- .def_readonly("p_max", &Rect::p_max);
1710
-
1711
- py::enum_<ColorType>(m, "ColorType")
1712
- .value("constant", ColorType::Constant)
1713
- .value("linear_gradient", ColorType::LinearGradient)
1714
- .value("radial_gradient", ColorType::RadialGradient);
1715
-
1716
- py::class_<Constant>(m, "Constant")
1717
- .def(py::init<Vector4f>())
1718
- .def("get_ptr", &Constant::get_ptr)
1719
- .def_readonly("color", &Constant::color);
1720
-
1721
- py::class_<LinearGradient>(m, "LinearGradient")
1722
- .def(py::init<Vector2f, Vector2f, int, ptr<float>, ptr<float>>())
1723
- .def("get_ptr", &LinearGradient::get_ptr)
1724
- .def("copy_to", &LinearGradient::copy_to)
1725
- .def_readonly("begin", &LinearGradient::begin)
1726
- .def_readonly("end", &LinearGradient::end)
1727
- .def_readonly("num_stops", &LinearGradient::num_stops);
1728
-
1729
- py::class_<RadialGradient>(m, "RadialGradient")
1730
- .def(py::init<Vector2f, Vector2f, int, ptr<float>, ptr<float>>())
1731
- .def("get_ptr", &RadialGradient::get_ptr)
1732
- .def("copy_to", &RadialGradient::copy_to)
1733
- .def_readonly("center", &RadialGradient::center)
1734
- .def_readonly("radius", &RadialGradient::radius)
1735
- .def_readonly("num_stops", &RadialGradient::num_stops);
1736
-
1737
- py::class_<Shape>(m, "Shape")
1738
- .def(py::init<ShapeType, ptr<void>, float>())
1739
- .def("as_circle", &Shape::as_circle)
1740
- .def("as_ellipse", &Shape::as_ellipse)
1741
- .def("as_path", &Shape::as_path)
1742
- .def("as_rect", &Shape::as_rect)
1743
- .def_readonly("type", &Shape::type)
1744
- .def_readonly("stroke_width", &Shape::stroke_width);
1745
-
1746
- py::class_<ShapeGroup>(m, "ShapeGroup")
1747
- .def(py::init<ptr<int>,
1748
- int,
1749
- ColorType,
1750
- ptr<void>,
1751
- ColorType,
1752
- ptr<void>,
1753
- bool,
1754
- ptr<float>>())
1755
- .def("fill_color_as_constant", &ShapeGroup::fill_color_as_constant)
1756
- .def("fill_color_as_linear_gradient", &ShapeGroup::fill_color_as_linear_gradient)
1757
- .def("fill_color_as_radial_gradient", &ShapeGroup::fill_color_as_radial_gradient)
1758
- .def("stroke_color_as_constant", &ShapeGroup::stroke_color_as_constant)
1759
- .def("stroke_color_as_linear_gradient", &ShapeGroup::stroke_color_as_linear_gradient)
1760
- .def("stroke_color_as_radial_gradient", &ShapeGroup::fill_color_as_radial_gradient)
1761
- .def("has_fill_color", &ShapeGroup::has_fill_color)
1762
- .def("has_stroke_color", &ShapeGroup::has_stroke_color)
1763
- .def("copy_to", &ShapeGroup::copy_to)
1764
- .def_readonly("fill_color_type", &ShapeGroup::fill_color_type)
1765
- .def_readonly("stroke_color_type", &ShapeGroup::stroke_color_type);
1766
-
1767
- py::enum_<FilterType>(m, "FilterType")
1768
- .value("box", FilterType::Box)
1769
- .value("tent", FilterType::Tent)
1770
- .value("parabolic", FilterType::RadialParabolic)
1771
- .value("hann", FilterType::Hann);
1772
-
1773
- py::class_<Filter>(m, "Filter")
1774
- .def(py::init<FilterType,
1775
- float>());
1776
-
1777
- py::class_<Scene, std::shared_ptr<Scene>>(m, "Scene")
1778
- .def(py::init<int,
1779
- int,
1780
- const std::vector<const Shape*> &,
1781
- const std::vector<const ShapeGroup*> &,
1782
- const Filter &,
1783
- bool,
1784
- int>())
1785
- .def("get_d_shape", &Scene::get_d_shape)
1786
- .def("get_d_shape_group", &Scene::get_d_shape_group)
1787
- .def("get_d_filter_radius", &Scene::get_d_filter_radius)
1788
- .def_readonly("num_shapes", &Scene::num_shapes)
1789
- .def_readonly("num_shape_groups", &Scene::num_shape_groups);
1790
-
1791
- m.def("render", &render, "");
1792
- }
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/LIVE/thrust/thrust/detail/complex/stream.h DELETED
@@ -1,71 +0,0 @@
1
- /*
2
- * Copyright 2008-2013 NVIDIA Corporation
3
- * Copyright 2013 Filipe RNC Maia
4
- *
5
- * Licensed under the Apache License, Version 2.0 (the "License");
6
- * you may not use this file except in compliance with the License.
7
- * You may obtain a copy of the License at
8
- *
9
- * http://www.apache.org/licenses/LICENSE-2.0
10
- *
11
- * Unless required by applicable law or agreed to in writing, software
12
- * distributed under the License is distributed on an "AS IS" BASIS,
13
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
- * See the License for the specific language governing permissions and
15
- * limitations under the License.
16
- */
17
-
18
- #include <thrust/complex.h>
19
-
20
- namespace thrust
21
- {
22
- template<typename ValueType,class charT, class traits>
23
- std::basic_ostream<charT, traits>& operator<<(std::basic_ostream<charT, traits>& os, const complex<ValueType>& z)
24
- {
25
- os << '(' << z.real() << ',' << z.imag() << ')';
26
- return os;
27
- }
28
-
29
- template<typename ValueType, typename charT, class traits>
30
- std::basic_istream<charT, traits>&
31
- operator>>(std::basic_istream<charT, traits>& is, complex<ValueType>& z)
32
- {
33
- ValueType re, im;
34
-
35
- charT ch;
36
- is >> ch;
37
-
38
- if(ch == '(')
39
- {
40
- is >> re >> ch;
41
- if (ch == ',')
42
- {
43
- is >> im >> ch;
44
- if (ch == ')')
45
- {
46
- z = complex<ValueType>(re, im);
47
- }
48
- else
49
- {
50
- is.setstate(std::ios_base::failbit);
51
- }
52
- }
53
- else if (ch == ')')
54
- {
55
- z = re;
56
- }
57
- else
58
- {
59
- is.setstate(std::ios_base::failbit);
60
- }
61
- }
62
- else
63
- {
64
- is.putback(ch);
65
- is >> re;
66
- z = re;
67
- }
68
- return is;
69
- }
70
-
71
- } // namespace thrust
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/LIVE/thrust/thrust/system/omp/pointer.h DELETED
@@ -1,360 +0,0 @@
1
- /*
2
- * Copyright 2008-2018 NVIDIA Corporation
3
- *
4
- * Licensed under the Apache License, Version 2.0 (the "License");
5
- * you may not use this file except in compliance with the License.
6
- * You may obtain a copy of the License at
7
- *
8
- * http://www.apache.org/licenses/LICENSE-2.0
9
- *
10
- * Unless required by applicable law or agreed to in writing, software
11
- * distributed under the License is distributed on an "AS IS" BASIS,
12
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- * See the License for the specific language governing permissions and
14
- * limitations under the License.
15
- */
16
-
17
- /*! \file thrust/system/omp/memory.h
18
- * \brief Managing memory associated with Thrust's OpenMP system.
19
- */
20
-
21
- #pragma once
22
-
23
- #include <thrust/detail/config.h>
24
- #include <thrust/system/omp/detail/execution_policy.h>
25
- #include <thrust/detail/type_traits.h>
26
- #include <thrust/detail/pointer.h>
27
- #include <thrust/detail/reference.h>
28
-
29
- namespace thrust
30
- {
31
- namespace system
32
- {
33
- namespace omp
34
- {
35
-
36
- template<typename> class pointer;
37
-
38
- } // end omp
39
- } // end system
40
- } // end thrust
41
-
42
-
43
- /*! \cond
44
- */
45
-
46
- // specialize thrust::iterator_traits to avoid problems with the name of
47
- // pointer's constructor shadowing its nested pointer type
48
- // do this before pointer is defined so the specialization is correctly
49
- // used inside the definition
50
- namespace thrust
51
- {
52
-
53
- template<typename Element>
54
- struct iterator_traits<thrust::system::omp::pointer<Element> >
55
- {
56
- private:
57
- typedef thrust::system::omp::pointer<Element> ptr;
58
-
59
- public:
60
- typedef typename ptr::iterator_category iterator_category;
61
- typedef typename ptr::value_type value_type;
62
- typedef typename ptr::difference_type difference_type;
63
- typedef ptr pointer;
64
- typedef typename ptr::reference reference;
65
- }; // end iterator_traits
66
-
67
- } // end thrust
68
-
69
- /*! \endcond
70
- */
71
-
72
-
73
- namespace thrust
74
- {
75
- namespace system
76
- {
77
-
78
- /*! \addtogroup system_backends Systems
79
- * \ingroup system
80
- * \{
81
- */
82
-
83
- /*! \namespace thrust::system::omp
84
- * \brief \p thrust::system::omp is the namespace containing functionality for allocating, manipulating,
85
- * and deallocating memory available to Thrust's OpenMP backend system.
86
- * The identifiers are provided in a separate namespace underneath <tt>thrust::system</tt>
87
- * for import convenience but are also aliased in the top-level <tt>thrust::omp</tt>
88
- * namespace for easy access.
89
- *
90
- */
91
- namespace omp
92
- {
93
-
94
- // forward declaration of reference for pointer
95
- template<typename Element> class reference;
96
-
97
- /*! \cond
98
- */
99
-
100
- // XXX nvcc + msvc have trouble instantiating reference below
101
- // this is a workaround
102
- namespace detail
103
- {
104
-
105
- template<typename Element>
106
- struct reference_msvc_workaround
107
- {
108
- typedef thrust::system::omp::reference<Element> type;
109
- }; // end reference_msvc_workaround
110
-
111
- } // end detail
112
-
113
- /*! \endcond
114
- */
115
-
116
-
117
- /*! \p pointer stores a pointer to an object allocated in memory available to the omp system.
118
- * This type provides type safety when dispatching standard algorithms on ranges resident
119
- * in omp memory.
120
- *
121
- * \p pointer has pointer semantics: it may be dereferenced and manipulated with pointer arithmetic.
122
- *
123
- * \p pointer can be created with the function \p omp::malloc, or by explicitly calling its constructor
124
- * with a raw pointer.
125
- *
126
- * The raw pointer encapsulated by a \p pointer may be obtained by eiter its <tt>get</tt> member function
127
- * or the \p raw_pointer_cast function.
128
- *
129
- * \note \p pointer is not a "smart" pointer; it is the programmer's responsibility to deallocate memory
130
- * pointed to by \p pointer.
131
- *
132
- * \tparam T specifies the type of the pointee.
133
- *
134
- * \see omp::malloc
135
- * \see omp::free
136
- * \see raw_pointer_cast
137
- */
138
- template<typename T>
139
- class pointer
140
- : public thrust::pointer<
141
- T,
142
- thrust::system::omp::tag,
143
- thrust::system::omp::reference<T>,
144
- thrust::system::omp::pointer<T>
145
- >
146
- {
147
- /*! \cond
148
- */
149
-
150
- private:
151
- typedef thrust::pointer<
152
- T,
153
- thrust::system::omp::tag,
154
- //thrust::system::omp::reference<T>,
155
- typename detail::reference_msvc_workaround<T>::type,
156
- thrust::system::omp::pointer<T>
157
- > super_t;
158
-
159
- /*! \endcond
160
- */
161
-
162
- public:
163
- // note that omp::pointer's member functions need __host__ __device__
164
- // to interoperate with nvcc + iterators' dereference member function
165
-
166
- /*! \p pointer's no-argument constructor initializes its encapsulated pointer to \c 0.
167
- */
168
- __host__ __device__
169
- pointer() : super_t() {}
170
-
171
- #if THRUST_CPP_DIALECT >= 2011
172
- // NOTE: This is needed so that Thrust smart pointers can be used in
173
- // `std::unique_ptr`.
174
- __host__ __device__
175
- pointer(decltype(nullptr)) : super_t(nullptr) {}
176
- #endif
177
-
178
- /*! This constructor allows construction of a <tt>pointer<const T></tt> from a <tt>T*</tt>.
179
- *
180
- * \param ptr A raw pointer to copy from, presumed to point to a location in memory
181
- * accessible by the \p omp system.
182
- * \tparam OtherT \p OtherT shall be convertible to \p T.
183
- */
184
- template<typename OtherT>
185
- __host__ __device__
186
- explicit pointer(OtherT *ptr) : super_t(ptr) {}
187
-
188
- /*! This constructor allows construction from another pointer-like object with related type.
189
- *
190
- * \param other The \p OtherPointer to copy.
191
- * \tparam OtherPointer The system tag associated with \p OtherPointer shall be convertible
192
- * to \p thrust::system::omp::tag and its element type shall be convertible to \p T.
193
- */
194
- template<typename OtherPointer>
195
- __host__ __device__
196
- pointer(const OtherPointer &other,
197
- typename thrust::detail::enable_if_pointer_is_convertible<
198
- OtherPointer,
199
- pointer
200
- >::type * = 0) : super_t(other) {}
201
-
202
- /*! This constructor allows construction from another pointer-like object with \p void type.
203
- *
204
- * \param other The \p OtherPointer to copy.
205
- * \tparam OtherPointer The system tag associated with \p OtherPointer shall be convertible
206
- * to \p thrust::system::omp::tag and its element type shall be \p void.
207
- */
208
- template<typename OtherPointer>
209
- __host__ __device__
210
- explicit
211
- pointer(const OtherPointer &other,
212
- typename thrust::detail::enable_if_void_pointer_is_system_convertible<
213
- OtherPointer,
214
- pointer
215
- >::type * = 0) : super_t(other) {}
216
-
217
- /*! Assignment operator allows assigning from another pointer-like object with related type.
218
- *
219
- * \param other The other pointer-like object to assign from.
220
- * \tparam OtherPointer The system tag associated with \p OtherPointer shall be convertible
221
- * to \p thrust::system::omp::tag and its element type shall be convertible to \p T.
222
- */
223
- template<typename OtherPointer>
224
- __host__ __device__
225
- typename thrust::detail::enable_if_pointer_is_convertible<
226
- OtherPointer,
227
- pointer,
228
- pointer &
229
- >::type
230
- operator=(const OtherPointer &other)
231
- {
232
- return super_t::operator=(other);
233
- }
234
-
235
- #if THRUST_CPP_DIALECT >= 2011
236
- // NOTE: This is needed so that Thrust smart pointers can be used in
237
- // `std::unique_ptr`.
238
- __host__ __device__
239
- pointer& operator=(decltype(nullptr))
240
- {
241
- super_t::operator=(nullptr);
242
- return *this;
243
- }
244
- #endif
245
- }; // end pointer
246
-
247
-
248
- /*! \p reference is a wrapped reference to an object stored in memory available to the \p omp system.
249
- * \p reference is the type of the result of dereferencing a \p omp::pointer.
250
- *
251
- * \tparam T Specifies the type of the referenced object.
252
- */
253
- template<typename T>
254
- class reference
255
- : public thrust::reference<
256
- T,
257
- thrust::system::omp::pointer<T>,
258
- thrust::system::omp::reference<T>
259
- >
260
- {
261
- /*! \cond
262
- */
263
-
264
- private:
265
- typedef thrust::reference<
266
- T,
267
- thrust::system::omp::pointer<T>,
268
- thrust::system::omp::reference<T>
269
- > super_t;
270
-
271
- /*! \endcond
272
- */
273
-
274
- public:
275
- /*! \cond
276
- */
277
-
278
- typedef typename super_t::value_type value_type;
279
- typedef typename super_t::pointer pointer;
280
-
281
- /*! \endcond
282
- */
283
-
284
- /*! This constructor initializes this \p reference to refer to an object
285
- * pointed to by the given \p pointer. After this \p reference is constructed,
286
- * it shall refer to the object pointed to by \p ptr.
287
- *
288
- * \param ptr A \p pointer to copy from.
289
- */
290
- __host__ __device__
291
- explicit reference(const pointer &ptr)
292
- : super_t(ptr)
293
- {}
294
-
295
- /*! This constructor accepts a const reference to another \p reference of related type.
296
- * After this \p reference is constructed, it shall refer to the same object as \p other.
297
- *
298
- * \param other A \p reference to copy from.
299
- * \tparam OtherT The element type of the other \p reference.
300
- *
301
- * \note This constructor is templated primarily to allow initialization of <tt>reference<const T></tt>
302
- * from <tt>reference<T></tt>.
303
- */
304
- template<typename OtherT>
305
- __host__ __device__
306
- reference(const reference<OtherT> &other,
307
- typename thrust::detail::enable_if_convertible<
308
- typename reference<OtherT>::pointer,
309
- pointer
310
- >::type * = 0)
311
- : super_t(other)
312
- {}
313
-
314
- /*! Copy assignment operator copy assigns from another \p reference of related type.
315
- *
316
- * \param other The other \p reference to assign from.
317
- * \return <tt>*this</tt>
318
- * \tparam OtherT The element type of the other \p reference.
319
- */
320
- template<typename OtherT>
321
- reference &operator=(const reference<OtherT> &other);
322
-
323
- /*! Assignment operator assigns from a \p value_type.
324
- *
325
- * \param x The \p value_type to assign from.
326
- * \return <tt>*this</tt>
327
- */
328
- reference &operator=(const value_type &x);
329
- }; // end reference
330
-
331
- /*! Exchanges the values of two objects referred to by \p reference.
332
- * \p x The first \p reference of interest.
333
- * \p y The second \p reference of interest.
334
- */
335
- template<typename T>
336
- __host__ __device__
337
- void swap(reference<T> x, reference<T> y);
338
-
339
- } // end omp
340
-
341
- /*! \}
342
- */
343
-
344
- } // end system
345
-
346
- /*! \namespace thrust::omp
347
- * \brief \p thrust::omp is a top-level alias for thrust::system::omp.
348
- */
349
- namespace omp
350
- {
351
-
352
- using thrust::system::omp::pointer;
353
- using thrust::system::omp::reference;
354
-
355
- } // end omp
356
-
357
- } // end thrust
358
-
359
- #include <thrust/system/omp/detail/pointer.inl>
360
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CVPR/MonoScene/monoscene/unet3d_nyu.py DELETED
@@ -1,90 +0,0 @@
1
- # encoding: utf-8
2
- import torch
3
- import torch.nn as nn
4
- import torch.nn.functional as F
5
- import numpy as np
6
- from monoscene.CRP3D import CPMegaVoxels
7
- from monoscene.modules import (
8
- Process,
9
- Upsample,
10
- Downsample,
11
- SegmentationHead,
12
- ASPP,
13
- )
14
-
15
-
16
- class UNet3D(nn.Module):
17
- def __init__(
18
- self,
19
- class_num,
20
- norm_layer,
21
- feature,
22
- full_scene_size,
23
- n_relations=4,
24
- project_res=[],
25
- context_prior=True,
26
- bn_momentum=0.1,
27
- ):
28
- super(UNet3D, self).__init__()
29
- self.business_layer = []
30
- self.project_res = project_res
31
-
32
- self.feature_1_4 = feature
33
- self.feature_1_8 = feature * 2
34
- self.feature_1_16 = feature * 4
35
-
36
- self.feature_1_16_dec = self.feature_1_16
37
- self.feature_1_8_dec = self.feature_1_8
38
- self.feature_1_4_dec = self.feature_1_4
39
-
40
- self.process_1_4 = nn.Sequential(
41
- Process(self.feature_1_4, norm_layer, bn_momentum, dilations=[1, 2, 3]),
42
- Downsample(self.feature_1_4, norm_layer, bn_momentum),
43
- )
44
- self.process_1_8 = nn.Sequential(
45
- Process(self.feature_1_8, norm_layer, bn_momentum, dilations=[1, 2, 3]),
46
- Downsample(self.feature_1_8, norm_layer, bn_momentum),
47
- )
48
- self.up_1_16_1_8 = Upsample(
49
- self.feature_1_16_dec, self.feature_1_8_dec, norm_layer, bn_momentum
50
- )
51
- self.up_1_8_1_4 = Upsample(
52
- self.feature_1_8_dec, self.feature_1_4_dec, norm_layer, bn_momentum
53
- )
54
- self.ssc_head_1_4 = SegmentationHead(
55
- self.feature_1_4_dec, self.feature_1_4_dec, class_num, [1, 2, 3]
56
- )
57
-
58
- self.context_prior = context_prior
59
- size_1_16 = tuple(np.ceil(i / 4).astype(int) for i in full_scene_size)
60
-
61
- if context_prior:
62
- self.CP_mega_voxels = CPMegaVoxels(
63
- self.feature_1_16,
64
- size_1_16,
65
- n_relations=n_relations,
66
- bn_momentum=bn_momentum,
67
- )
68
-
69
- #
70
- def forward(self, input_dict):
71
- res = {}
72
-
73
- x3d_1_4 = input_dict["x3d"]
74
- x3d_1_8 = self.process_1_4(x3d_1_4)
75
- x3d_1_16 = self.process_1_8(x3d_1_8)
76
-
77
- if self.context_prior:
78
- ret = self.CP_mega_voxels(x3d_1_16)
79
- x3d_1_16 = ret["x"]
80
- for k in ret.keys():
81
- res[k] = ret[k]
82
-
83
- x3d_up_1_8 = self.up_1_16_1_8(x3d_1_16) + x3d_1_8
84
- x3d_up_1_4 = self.up_1_8_1_4(x3d_up_1_8) + x3d_1_4
85
-
86
- ssc_logit_1_4 = self.ssc_head_1_4(x3d_up_1_4)
87
-
88
- res["ssc_logit"] = ssc_logit_1_4
89
-
90
- return res
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CikeyQI/Yunzai/Yunzai/plugins/ws-plugin/resources/admin/index.html DELETED
@@ -1,42 +0,0 @@
1
- {{extend defaultLayout}}
2
- {{block 'css'}}
3
- <link rel="stylesheet" type="text/css" href="{{_res_path}}/admin/index.css"/>
4
- {{/block}}
5
- {{block 'main'}}
6
-
7
- <div class="info_box">
8
- <div class="head-box type{{bgType}}">
9
- <div class="label">ws管理面板</div>
10
- <div class="title">#ws设置</div>
11
- </div>
12
- </div>
13
- {{each schema cfgGroup}}
14
- <div class="cfg-box">
15
- <div class="cfg-group">{{cfgGroup.title}}</div>
16
- <ul class="cfg-ul">
17
- {{each cfgGroup.cfg cfgItem cfgKey}}
18
- <li class="cfg-li">
19
- <div class="cfg-line">
20
- {{cfgItem.title}}
21
- <span class="cfg-hint"> #ws设置{{cfgItem.key}}
22
- {{if cfgItem.type==='num'}} {{cfgItem.def}}{{else}} 开启/关闭{{/if}}
23
- </span>
24
- {{if cfgItem.type === 'num'}}
25
- <div class="cfg-status">{{cfg[cfgKey]}}</div>
26
- {{else}}
27
- {{if cfg[cfgKey]}}
28
- <div class="cfg-status">已开启</div>
29
- {{else}}
30
- <div class="cfg-status status-off">已关闭</div>
31
- {{/if}}
32
- {{/if}}
33
- </div>
34
- {{if cfgItem.desc && cfgItem.showDesc!== false}}
35
- <div class="cfg-desc">{{cfgItem.desc}}</div>
36
- {{/if}}
37
- </li>
38
- {{/each}}
39
- </ul>
40
- </div>
41
- {{/each}}
42
- {{/block}}
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/ClearLove443/Robby-chatbot/pages/1_📄Robby-Chat.py DELETED
@@ -1,100 +0,0 @@
1
- import os
2
- import streamlit as st
3
- from io import StringIO
4
- import re
5
- import sys
6
- from modules.history import ChatHistory
7
- from modules.layout import Layout
8
- from modules.utils import Utilities
9
- from modules.sidebar import Sidebar
10
-
11
- #To be able to update the changes made to modules in localhost (press r)
12
- def reload_module(module_name):
13
- import importlib
14
- import sys
15
- if module_name in sys.modules:
16
- importlib.reload(sys.modules[module_name])
17
- return sys.modules[module_name]
18
-
19
- history_module = reload_module('modules.history')
20
- layout_module = reload_module('modules.layout')
21
- utils_module = reload_module('modules.utils')
22
- sidebar_module = reload_module('modules.sidebar')
23
-
24
- ChatHistory = history_module.ChatHistory
25
- Layout = layout_module.Layout
26
- Utilities = utils_module.Utilities
27
- Sidebar = sidebar_module.Sidebar
28
-
29
- st.set_page_config(layout="wide", page_icon="💬", page_title="Robby | Chat-Bot 🤖")
30
-
31
- # Instantiate the main components
32
- layout, sidebar, utils = Layout(), Sidebar(), Utilities()
33
-
34
- layout.show_header("PDF, TXT, CSV")
35
-
36
- user_api_key = utils.load_api_key()
37
-
38
- if not user_api_key:
39
- layout.show_api_key_missing()
40
- else:
41
- os.environ["OPENAI_API_KEY"] = user_api_key
42
-
43
- uploaded_file = utils.handle_upload(["pdf", "txt", "csv"])
44
-
45
- if uploaded_file:
46
-
47
- # Configure the sidebar
48
- sidebar.show_options()
49
- sidebar.about()
50
-
51
- # Initialize chat history
52
- history = ChatHistory()
53
- try:
54
- chatbot = utils.setup_chatbot(
55
- uploaded_file, st.session_state["model"], st.session_state["temperature"]
56
- )
57
- st.session_state["chatbot"] = chatbot
58
-
59
- if st.session_state["ready"]:
60
- # Create containers for chat responses and user prompts
61
- response_container, prompt_container = st.container(), st.container()
62
-
63
- with prompt_container:
64
- # Display the prompt form
65
- is_ready, user_input = layout.prompt_form()
66
-
67
- # Initialize the chat history
68
- history.initialize(uploaded_file)
69
-
70
- # Reset the chat history if button clicked
71
- if st.session_state["reset_chat"]:
72
- history.reset(uploaded_file)
73
-
74
- if is_ready:
75
- # Update the chat history and display the chat messages
76
- history.append("user", user_input)
77
-
78
- old_stdout = sys.stdout
79
- sys.stdout = captured_output = StringIO()
80
-
81
- output = st.session_state["chatbot"].conversational_chat(user_input)
82
-
83
- sys.stdout = old_stdout
84
-
85
- history.append("assistant", output)
86
-
87
- # Clean up the agent's thoughts to remove unwanted characters
88
- thoughts = captured_output.getvalue()
89
- cleaned_thoughts = re.sub(r'\x1b\[[0-9;]*[a-zA-Z]', '', thoughts)
90
- cleaned_thoughts = re.sub(r'\[1m>', '', cleaned_thoughts)
91
-
92
- # Display the agent's thoughts
93
- with st.expander("Display the agent's thoughts"):
94
- st.write(cleaned_thoughts)
95
-
96
- history.generate_messages(response_container)
97
- except Exception as e:
98
- st.error(f"Error: {str(e)}")
99
-
100
-
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/CoPoBio/skin_cancer_risk_prediction/app_DCCPH.py DELETED
@@ -1,225 +0,0 @@
1
- import gradio as gr
2
-
3
-
4
- # -*- coding: utf-8 -*-
5
-
6
- import cv2
7
- import numpy as np
8
- import tensorflow.compat.v1 as tf
9
- tf.disable_v2_behavior()
10
-
11
-
12
-
13
- import imutils
14
- import dlib
15
- from facealigner import FaceAligner
16
- from imutils.face_utils import rect_to_bb
17
- from imutils import face_utils
18
-
19
- #-------------------------------start facial preprocessing------------------------------
20
- detector_size = 512
21
-
22
- # construct the arguments; shape-predictor & image
23
- shape_predictor = 'shape_predictor_68_face_landmarks.dat'
24
-
25
- #initialize dlib's face detector (HOG-based) and then create
26
- # the facial landmark predictor and the face aligner
27
- detector = dlib.get_frontal_face_detector()
28
- predictor = dlib.shape_predictor(shape_predictor)
29
- fa = FaceAligner(predictor, desiredFaceWidth=detector_size)
30
-
31
-
32
- def face_preprocessing(image_src):#,save_name):
33
-
34
- image_resized = imutils.resize(image_src, width=768)
35
- gray = cv2.cvtColor(image_resized, cv2.COLOR_BGR2GRAY)
36
- rects = detector(gray, 2)
37
- if len(rects) == 0:
38
- print('no face detected')
39
- return image_src, 0
40
- rect = rects[0]
41
- #print(image_resized.shape, gray.shape, rect)
42
- img = fa.align(image_resized, gray, rect)
43
- #print(img.shape)
44
- #exit(0)
45
- gray2 = img.copy()
46
- rects2 = detector(gray2, 2) #########
47
- if len(rects2) == 0:
48
- print('no face detected after alignment')
49
- return img, 0
50
- rect = rects2[0]
51
-
52
- lm = predictor(gray2, rect)
53
- lm = face_utils.shape_to_np(lm)
54
-
55
- n_size=img.shape[0]
56
- landmarks_points=[]
57
- for n in range(0,17):
58
- if n == 0:
59
- x = 0
60
- y = 0
61
- elif n == 16:
62
- x=n_size
63
- y=0
64
- else:
65
- x=lm[n][0]
66
- y=lm[n][1]
67
-
68
- landmarks_points.append((x,y))
69
- #print(landmarks_points)
70
- #exit(0)
71
- target_gray=cv2.cvtColor(img,cv2.COLOR_RGB2GRAY)
72
- mask=np.zeros_like(target_gray)
73
- points=np.array(landmarks_points,np.int32)
74
-
75
- convexhull=cv2.convexHull(points)
76
-
77
- cv2.fillConvexPoly(mask,convexhull,255)
78
-
79
- target_face_1=cv2.bitwise_and(img,img,mask=mask)
80
-
81
- left = lm[0]
82
- right = lm[16]
83
- top = lm[20]
84
- nosetip = lm[30]
85
- jaw = lm[8]
86
- n=60
87
- #print(left)
88
- #print(2*(left[0]-n)-(1024-jaw[1]),jaw[1],left[0]-n,1024-(left[0]-n))
89
- if left[0]-n >= detector_size-(left[0]-n) or left[0]-n > detector_size-(left[0]-n):
90
- print('not cropped')
91
- return img, 1
92
- img_crop = target_face_1[left[0]-n:detector_size-(left[0]-n),left[0]-n:detector_size-(left[0]-n)]
93
- #mg_crop = img[ 2*(left[0]-n)-(1024-jaw[1]):jaw[1],left[0]-n:1024-(left[0]-n)]
94
- return img_crop, 1
95
-
96
-
97
-
98
- #----------------------------end facial preprocessing------------
99
-
100
-
101
-
102
-
103
-
104
-
105
- batch_size = 1
106
- w,h=200,200
107
- ch=3
108
-
109
- # -----------------build networks----------------------
110
-
111
- x = tf.placeholder(tf.float32, shape=[batch_size, w, h, ch], name='x')
112
- y_ = tf.placeholder(tf.float32, shape=[batch_size, 1], name='y_')
113
- y_event = tf.placeholder(tf.float32, shape=[batch_size, 1], name='y_event')
114
- y_ = tf.transpose(y_)
115
- y_mask = tf.placeholder(tf.float32, shape=[batch_size, batch_size], name='y_mask')
116
- y_true = tf.concat ([y_, y_mask], axis = 0)
117
- y_true=tf.transpose(y_true)
118
- print(tf.shape(y_true))
119
-
120
-
121
- # conv1
122
- conv1 = tf.layers.conv2d(inputs=x, filters=32, kernel_size=[5, 5], padding="same", activation=tf.nn.relu,
123
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
124
- pool1 = tf.layers.max_pooling2d(inputs=conv1, pool_size=[2, 2], strides=2)
125
-
126
- # conv2
127
- conv2 = tf.layers.conv2d(inputs=pool1, filters=64, kernel_size=[5, 5], padding="same", activation=tf.nn.relu,
128
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
129
- pool2 = tf.layers.max_pooling2d(inputs=conv2, pool_size=[2, 2], strides=2)
130
-
131
- # conv3
132
- conv3 = tf.layers.conv2d(inputs=pool2, filters=128, kernel_size=[3, 3], padding="same", activation=tf.nn.relu,
133
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
134
- pool3 = tf.layers.max_pooling2d(inputs=conv3, pool_size=[2, 2], strides=2)
135
-
136
- # conv4
137
- conv4 = tf.layers.conv2d(inputs=pool3, filters=256, kernel_size=[3, 3], padding="same", activation=tf.nn.relu,
138
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
139
- pool4 = tf.layers.max_pooling2d(inputs=conv4, pool_size=[2, 2], strides=2)
140
-
141
- re1 = tf.reshape(pool4, [-1, 12 * 12 * 256])
142
-
143
- # fully connected
144
- dense1 = tf.layers.dense(inputs=re1, units=1024, activation=tf.nn.relu,
145
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
146
- dense2 = tf.layers.dense(inputs=dense1, units=512, activation=tf.nn.relu,
147
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
148
-
149
-
150
- keep_prob = tf.placeholder(tf.float32) # keep_prob: 1.0 means 100% keep
151
- h_fcl_drop = tf.nn.dropout(dense2,keep_prob) # dropout
152
- logits = tf.layers.dense(inputs=h_fcl_drop, units=1, activation=None,
153
- kernel_initializer=tf.truncated_normal_initializer(stddev=0.01))
154
- #--------------------------------------end build networks
155
-
156
-
157
- config = tf.ConfigProto()
158
- config.gpu_options.allow_growth = True
159
- saver = tf.train.Saver()
160
-
161
- sess = tf.Session(config=config)
162
- sess.run(tf.global_variables_initializer())
163
- saver.restore(sess, tf.train.latest_checkpoint('checkpoint2/'))
164
-
165
-
166
-
167
- # Images
168
-
169
-
170
- def image_classifier(image):
171
- image_processed, processed_flag = face_preprocessing(image)
172
- if processed_flag == 0:
173
- results = {}
174
- results['no face detected']= 0
175
- return 'no face detected'
176
-
177
- #image = np.array(image)
178
- image = cv2.resize(image_processed, (w, h),interpolation=cv2.INTER_AREA)
179
- print(image.shape)
180
- colorimage_b = cv2.equalizeHist(image[:,:,0])
181
- colorimage_g = cv2.equalizeHist(image[:,:,1])
182
- colorimage_r = cv2.equalizeHist(image[:,:,2])
183
-
184
- # Next we stack our equalized channels back into a single image
185
- image_feed = np.stack((colorimage_b,colorimage_g,colorimage_r), axis=2)
186
- image_feed = np.reshape(image_feed, (1, w, h, 3))
187
- image_feed = image_feed.astype(np.float32)
188
-
189
- logits_out = sess.run([logits ], feed_dict={x: image_feed, keep_prob:1.0})
190
- #normalized_score = (logits_out[0]+4326668288.0)/(8428149760.0+4326668288.0)
191
- normalized_score = (logits_out[0]+40.463287353515625)/(31.093469619750977+40.463287353515625)
192
- if normalized_score > 1:
193
- normalized_score == 1
194
- if normalized_score < 0:
195
- normalized_score == 0
196
-
197
- #results[''] = (logits_out[0]+4326668288.0)/(8428149760.0+4326668288.0)
198
- #results['risk']= normalized_score
199
- return 'The predicted risk is:', normalized_score[0]
200
-
201
- title = "Demonstration of skin cancer risk prediction"
202
- description = """
203
- This app is a proof-of-concept demonstration of predicting the risk of developing skin cancer\n
204
- Please kindly note that the model was trained with facial images of participants (age > 50) from the [Rotterdam Study](http://www.epib.nl/research/ergo.htm). \n
205
- Facial images were taken in a 3D imaging room with consistent ambient lighting \n
206
- For more information, please check: https://www.medrxiv.org/content/10.1101/2023.10.04.23296549v1\n
207
-
208
- To start, please upload a frontal facial image:
209
-
210
- """
211
- examples=[
212
- ['01.jpg', 'Simple Lines'], ['02.jpg', 'Simple Lines'], ['03.jpg', 'Simple Lines']
213
- ]
214
-
215
- with gr.Blocks() as demo:
216
- uploaded_image = gr.Image(type="numpy")
217
- txt_output = gr.Textbox(value="", label="Output")
218
- btn = gr.Button(value="Submit")
219
- btn.click(image_classifier, inputs=[uploaded_image], outputs=[txt_output])
220
-
221
- #demo = gr.Interface(fn=image_classifier, inputs=uploaded_image, outputs="label", title=title, description=description)
222
- #demo.launch(show_api=False)
223
- demo.launch()
224
-
225
- sess.close()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/utils/__init__.py DELETED
File without changes
spaces/Cyril666/ContourNet-ABI/maskrcnn_benchmark/utils/imports.py DELETED
@@ -1,23 +0,0 @@
1
- # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved.
2
- import torch
3
-
4
- if torch._six.PY3:
5
- import importlib
6
- import importlib.util
7
- import sys
8
-
9
-
10
- # from https://stackoverflow.com/questions/67631/how-to-import-a-module-given-the-full-path?utm_medium=organic&utm_source=google_rich_qa&utm_campaign=google_rich_qa
11
- def import_file(module_name, file_path, make_importable=False):
12
- spec = importlib.util.spec_from_file_location(module_name, file_path)
13
- module = importlib.util.module_from_spec(spec)
14
- spec.loader.exec_module(module)
15
- if make_importable:
16
- sys.modules[module_name] = module
17
- return module
18
- else:
19
- import imp
20
-
21
- def import_file(module_name, file_path, make_importable=None):
22
- module = imp.load_source(module_name, file_path)
23
- return module
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/ImageMath.py DELETED
@@ -1,263 +0,0 @@
1
- #
2
- # The Python Imaging Library
3
- # $Id$
4
- #
5
- # a simple math add-on for the Python Imaging Library
6
- #
7
- # History:
8
- # 1999-02-15 fl Original PIL Plus release
9
- # 2005-05-05 fl Simplified and cleaned up for PIL 1.1.6
10
- # 2005-09-12 fl Fixed int() and float() for Python 2.4.1
11
- #
12
- # Copyright (c) 1999-2005 by Secret Labs AB
13
- # Copyright (c) 2005 by Fredrik Lundh
14
- #
15
- # See the README file for information on usage and redistribution.
16
- #
17
-
18
- import builtins
19
-
20
- from . import Image, _imagingmath
21
-
22
-
23
- def _isconstant(v):
24
- return isinstance(v, (int, float))
25
-
26
-
27
- class _Operand:
28
- """Wraps an image operand, providing standard operators"""
29
-
30
- def __init__(self, im):
31
- self.im = im
32
-
33
- def __fixup(self, im1):
34
- # convert image to suitable mode
35
- if isinstance(im1, _Operand):
36
- # argument was an image.
37
- if im1.im.mode in ("1", "L"):
38
- return im1.im.convert("I")
39
- elif im1.im.mode in ("I", "F"):
40
- return im1.im
41
- else:
42
- msg = f"unsupported mode: {im1.im.mode}"
43
- raise ValueError(msg)
44
- else:
45
- # argument was a constant
46
- if _isconstant(im1) and self.im.mode in ("1", "L", "I"):
47
- return Image.new("I", self.im.size, im1)
48
- else:
49
- return Image.new("F", self.im.size, im1)
50
-
51
- def apply(self, op, im1, im2=None, mode=None):
52
- im1 = self.__fixup(im1)
53
- if im2 is None:
54
- # unary operation
55
- out = Image.new(mode or im1.mode, im1.size, None)
56
- im1.load()
57
- try:
58
- op = getattr(_imagingmath, op + "_" + im1.mode)
59
- except AttributeError as e:
60
- msg = f"bad operand type for '{op}'"
61
- raise TypeError(msg) from e
62
- _imagingmath.unop(op, out.im.id, im1.im.id)
63
- else:
64
- # binary operation
65
- im2 = self.__fixup(im2)
66
- if im1.mode != im2.mode:
67
- # convert both arguments to floating point
68
- if im1.mode != "F":
69
- im1 = im1.convert("F")
70
- if im2.mode != "F":
71
- im2 = im2.convert("F")
72
- if im1.size != im2.size:
73
- # crop both arguments to a common size
74
- size = (min(im1.size[0], im2.size[0]), min(im1.size[1], im2.size[1]))
75
- if im1.size != size:
76
- im1 = im1.crop((0, 0) + size)
77
- if im2.size != size:
78
- im2 = im2.crop((0, 0) + size)
79
- out = Image.new(mode or im1.mode, im1.size, None)
80
- im1.load()
81
- im2.load()
82
- try:
83
- op = getattr(_imagingmath, op + "_" + im1.mode)
84
- except AttributeError as e:
85
- msg = f"bad operand type for '{op}'"
86
- raise TypeError(msg) from e
87
- _imagingmath.binop(op, out.im.id, im1.im.id, im2.im.id)
88
- return _Operand(out)
89
-
90
- # unary operators
91
- def __bool__(self):
92
- # an image is "true" if it contains at least one non-zero pixel
93
- return self.im.getbbox() is not None
94
-
95
- def __abs__(self):
96
- return self.apply("abs", self)
97
-
98
- def __pos__(self):
99
- return self
100
-
101
- def __neg__(self):
102
- return self.apply("neg", self)
103
-
104
- # binary operators
105
- def __add__(self, other):
106
- return self.apply("add", self, other)
107
-
108
- def __radd__(self, other):
109
- return self.apply("add", other, self)
110
-
111
- def __sub__(self, other):
112
- return self.apply("sub", self, other)
113
-
114
- def __rsub__(self, other):
115
- return self.apply("sub", other, self)
116
-
117
- def __mul__(self, other):
118
- return self.apply("mul", self, other)
119
-
120
- def __rmul__(self, other):
121
- return self.apply("mul", other, self)
122
-
123
- def __truediv__(self, other):
124
- return self.apply("div", self, other)
125
-
126
- def __rtruediv__(self, other):
127
- return self.apply("div", other, self)
128
-
129
- def __mod__(self, other):
130
- return self.apply("mod", self, other)
131
-
132
- def __rmod__(self, other):
133
- return self.apply("mod", other, self)
134
-
135
- def __pow__(self, other):
136
- return self.apply("pow", self, other)
137
-
138
- def __rpow__(self, other):
139
- return self.apply("pow", other, self)
140
-
141
- # bitwise
142
- def __invert__(self):
143
- return self.apply("invert", self)
144
-
145
- def __and__(self, other):
146
- return self.apply("and", self, other)
147
-
148
- def __rand__(self, other):
149
- return self.apply("and", other, self)
150
-
151
- def __or__(self, other):
152
- return self.apply("or", self, other)
153
-
154
- def __ror__(self, other):
155
- return self.apply("or", other, self)
156
-
157
- def __xor__(self, other):
158
- return self.apply("xor", self, other)
159
-
160
- def __rxor__(self, other):
161
- return self.apply("xor", other, self)
162
-
163
- def __lshift__(self, other):
164
- return self.apply("lshift", self, other)
165
-
166
- def __rshift__(self, other):
167
- return self.apply("rshift", self, other)
168
-
169
- # logical
170
- def __eq__(self, other):
171
- return self.apply("eq", self, other)
172
-
173
- def __ne__(self, other):
174
- return self.apply("ne", self, other)
175
-
176
- def __lt__(self, other):
177
- return self.apply("lt", self, other)
178
-
179
- def __le__(self, other):
180
- return self.apply("le", self, other)
181
-
182
- def __gt__(self, other):
183
- return self.apply("gt", self, other)
184
-
185
- def __ge__(self, other):
186
- return self.apply("ge", self, other)
187
-
188
-
189
- # conversions
190
- def imagemath_int(self):
191
- return _Operand(self.im.convert("I"))
192
-
193
-
194
- def imagemath_float(self):
195
- return _Operand(self.im.convert("F"))
196
-
197
-
198
- # logical
199
- def imagemath_equal(self, other):
200
- return self.apply("eq", self, other, mode="I")
201
-
202
-
203
- def imagemath_notequal(self, other):
204
- return self.apply("ne", self, other, mode="I")
205
-
206
-
207
- def imagemath_min(self, other):
208
- return self.apply("min", self, other)
209
-
210
-
211
- def imagemath_max(self, other):
212
- return self.apply("max", self, other)
213
-
214
-
215
- def imagemath_convert(self, mode):
216
- return _Operand(self.im.convert(mode))
217
-
218
-
219
- ops = {}
220
- for k, v in list(globals().items()):
221
- if k[:10] == "imagemath_":
222
- ops[k[10:]] = v
223
-
224
-
225
- def eval(expression, _dict={}, **kw):
226
- """
227
- Evaluates an image expression.
228
-
229
- :param expression: A string containing a Python-style expression.
230
- :param options: Values to add to the evaluation context. You
231
- can either use a dictionary, or one or more keyword
232
- arguments.
233
- :return: The evaluated expression. This is usually an image object, but can
234
- also be an integer, a floating point value, or a pixel tuple,
235
- depending on the expression.
236
- """
237
-
238
- # build execution namespace
239
- args = ops.copy()
240
- args.update(_dict)
241
- args.update(kw)
242
- for k, v in list(args.items()):
243
- if hasattr(v, "im"):
244
- args[k] = _Operand(v)
245
-
246
- compiled_code = compile(expression, "<string>", "eval")
247
-
248
- def scan(code):
249
- for const in code.co_consts:
250
- if type(const) == type(compiled_code):
251
- scan(const)
252
-
253
- for name in code.co_names:
254
- if name not in args and name != "abs":
255
- msg = f"'{name}' not allowed"
256
- raise ValueError(msg)
257
-
258
- scan(compiled_code)
259
- out = builtins.eval(expression, {"__builtins": {"abs": abs}}, args)
260
- try:
261
- return out.im
262
- except AttributeError:
263
- return out
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/PIL/PngImagePlugin.py DELETED
@@ -1,1456 +0,0 @@
1
- #
2
- # The Python Imaging Library.
3
- # $Id$
4
- #
5
- # PNG support code
6
- #
7
- # See "PNG (Portable Network Graphics) Specification, version 1.0;
8
- # W3C Recommendation", 1996-10-01, Thomas Boutell (ed.).
9
- #
10
- # history:
11
- # 1996-05-06 fl Created (couldn't resist it)
12
- # 1996-12-14 fl Upgraded, added read and verify support (0.2)
13
- # 1996-12-15 fl Separate PNG stream parser
14
- # 1996-12-29 fl Added write support, added getchunks
15
- # 1996-12-30 fl Eliminated circular references in decoder (0.3)
16
- # 1998-07-12 fl Read/write 16-bit images as mode I (0.4)
17
- # 2001-02-08 fl Added transparency support (from Zircon) (0.5)
18
- # 2001-04-16 fl Don't close data source in "open" method (0.6)
19
- # 2004-02-24 fl Don't even pretend to support interlaced files (0.7)
20
- # 2004-08-31 fl Do basic sanity check on chunk identifiers (0.8)
21
- # 2004-09-20 fl Added PngInfo chunk container
22
- # 2004-12-18 fl Added DPI read support (based on code by Niki Spahiev)
23
- # 2008-08-13 fl Added tRNS support for RGB images
24
- # 2009-03-06 fl Support for preserving ICC profiles (by Florian Hoech)
25
- # 2009-03-08 fl Added zTXT support (from Lowell Alleman)
26
- # 2009-03-29 fl Read interlaced PNG files (from Conrado Porto Lopes Gouvua)
27
- #
28
- # Copyright (c) 1997-2009 by Secret Labs AB
29
- # Copyright (c) 1996 by Fredrik Lundh
30
- #
31
- # See the README file for information on usage and redistribution.
32
- #
33
-
34
- import itertools
35
- import logging
36
- import re
37
- import struct
38
- import warnings
39
- import zlib
40
- from enum import IntEnum
41
-
42
- from . import Image, ImageChops, ImageFile, ImagePalette, ImageSequence
43
- from ._binary import i16be as i16
44
- from ._binary import i32be as i32
45
- from ._binary import o8
46
- from ._binary import o16be as o16
47
- from ._binary import o32be as o32
48
-
49
- logger = logging.getLogger(__name__)
50
-
51
- is_cid = re.compile(rb"\w\w\w\w").match
52
-
53
-
54
- _MAGIC = b"\211PNG\r\n\032\n"
55
-
56
-
57
- _MODES = {
58
- # supported bits/color combinations, and corresponding modes/rawmodes
59
- # Greyscale
60
- (1, 0): ("1", "1"),
61
- (2, 0): ("L", "L;2"),
62
- (4, 0): ("L", "L;4"),
63
- (8, 0): ("L", "L"),
64
- (16, 0): ("I", "I;16B"),
65
- # Truecolour
66
- (8, 2): ("RGB", "RGB"),
67
- (16, 2): ("RGB", "RGB;16B"),
68
- # Indexed-colour
69
- (1, 3): ("P", "P;1"),
70
- (2, 3): ("P", "P;2"),
71
- (4, 3): ("P", "P;4"),
72
- (8, 3): ("P", "P"),
73
- # Greyscale with alpha
74
- (8, 4): ("LA", "LA"),
75
- (16, 4): ("RGBA", "LA;16B"), # LA;16B->LA not yet available
76
- # Truecolour with alpha
77
- (8, 6): ("RGBA", "RGBA"),
78
- (16, 6): ("RGBA", "RGBA;16B"),
79
- }
80
-
81
-
82
- _simple_palette = re.compile(b"^\xff*\x00\xff*$")
83
-
84
- MAX_TEXT_CHUNK = ImageFile.SAFEBLOCK
85
- """
86
- Maximum decompressed size for a iTXt or zTXt chunk.
87
- Eliminates decompression bombs where compressed chunks can expand 1000x.
88
- See :ref:`Text in PNG File Format<png-text>`.
89
- """
90
- MAX_TEXT_MEMORY = 64 * MAX_TEXT_CHUNK
91
- """
92
- Set the maximum total text chunk size.
93
- See :ref:`Text in PNG File Format<png-text>`.
94
- """
95
-
96
-
97
- # APNG frame disposal modes
98
- class Disposal(IntEnum):
99
- OP_NONE = 0
100
- """
101
- No disposal is done on this frame before rendering the next frame.
102
- See :ref:`Saving APNG sequences<apng-saving>`.
103
- """
104
- OP_BACKGROUND = 1
105
- """
106
- This frame’s modified region is cleared to fully transparent black before rendering
107
- the next frame.
108
- See :ref:`Saving APNG sequences<apng-saving>`.
109
- """
110
- OP_PREVIOUS = 2
111
- """
112
- This frame’s modified region is reverted to the previous frame’s contents before
113
- rendering the next frame.
114
- See :ref:`Saving APNG sequences<apng-saving>`.
115
- """
116
-
117
-
118
- # APNG frame blend modes
119
- class Blend(IntEnum):
120
- OP_SOURCE = 0
121
- """
122
- All color components of this frame, including alpha, overwrite the previous output
123
- image contents.
124
- See :ref:`Saving APNG sequences<apng-saving>`.
125
- """
126
- OP_OVER = 1
127
- """
128
- This frame should be alpha composited with the previous output image contents.
129
- See :ref:`Saving APNG sequences<apng-saving>`.
130
- """
131
-
132
-
133
- def _safe_zlib_decompress(s):
134
- dobj = zlib.decompressobj()
135
- plaintext = dobj.decompress(s, MAX_TEXT_CHUNK)
136
- if dobj.unconsumed_tail:
137
- msg = "Decompressed Data Too Large"
138
- raise ValueError(msg)
139
- return plaintext
140
-
141
-
142
- def _crc32(data, seed=0):
143
- return zlib.crc32(data, seed) & 0xFFFFFFFF
144
-
145
-
146
- # --------------------------------------------------------------------
147
- # Support classes. Suitable for PNG and related formats like MNG etc.
148
-
149
-
150
- class ChunkStream:
151
- def __init__(self, fp):
152
- self.fp = fp
153
- self.queue = []
154
-
155
- def read(self):
156
- """Fetch a new chunk. Returns header information."""
157
- cid = None
158
-
159
- if self.queue:
160
- cid, pos, length = self.queue.pop()
161
- self.fp.seek(pos)
162
- else:
163
- s = self.fp.read(8)
164
- cid = s[4:]
165
- pos = self.fp.tell()
166
- length = i32(s)
167
-
168
- if not is_cid(cid):
169
- if not ImageFile.LOAD_TRUNCATED_IMAGES:
170
- msg = f"broken PNG file (chunk {repr(cid)})"
171
- raise SyntaxError(msg)
172
-
173
- return cid, pos, length
174
-
175
- def __enter__(self):
176
- return self
177
-
178
- def __exit__(self, *args):
179
- self.close()
180
-
181
- def close(self):
182
- self.queue = self.fp = None
183
-
184
- def push(self, cid, pos, length):
185
- self.queue.append((cid, pos, length))
186
-
187
- def call(self, cid, pos, length):
188
- """Call the appropriate chunk handler"""
189
-
190
- logger.debug("STREAM %r %s %s", cid, pos, length)
191
- return getattr(self, "chunk_" + cid.decode("ascii"))(pos, length)
192
-
193
- def crc(self, cid, data):
194
- """Read and verify checksum"""
195
-
196
- # Skip CRC checks for ancillary chunks if allowed to load truncated
197
- # images
198
- # 5th byte of first char is 1 [specs, section 5.4]
199
- if ImageFile.LOAD_TRUNCATED_IMAGES and (cid[0] >> 5 & 1):
200
- self.crc_skip(cid, data)
201
- return
202
-
203
- try:
204
- crc1 = _crc32(data, _crc32(cid))
205
- crc2 = i32(self.fp.read(4))
206
- if crc1 != crc2:
207
- msg = f"broken PNG file (bad header checksum in {repr(cid)})"
208
- raise SyntaxError(msg)
209
- except struct.error as e:
210
- msg = f"broken PNG file (incomplete checksum in {repr(cid)})"
211
- raise SyntaxError(msg) from e
212
-
213
- def crc_skip(self, cid, data):
214
- """Read checksum"""
215
-
216
- self.fp.read(4)
217
-
218
- def verify(self, endchunk=b"IEND"):
219
- # Simple approach; just calculate checksum for all remaining
220
- # blocks. Must be called directly after open.
221
-
222
- cids = []
223
-
224
- while True:
225
- try:
226
- cid, pos, length = self.read()
227
- except struct.error as e:
228
- msg = "truncated PNG file"
229
- raise OSError(msg) from e
230
-
231
- if cid == endchunk:
232
- break
233
- self.crc(cid, ImageFile._safe_read(self.fp, length))
234
- cids.append(cid)
235
-
236
- return cids
237
-
238
-
239
- class iTXt(str):
240
- """
241
- Subclass of string to allow iTXt chunks to look like strings while
242
- keeping their extra information
243
-
244
- """
245
-
246
- @staticmethod
247
- def __new__(cls, text, lang=None, tkey=None):
248
- """
249
- :param cls: the class to use when creating the instance
250
- :param text: value for this key
251
- :param lang: language code
252
- :param tkey: UTF-8 version of the key name
253
- """
254
-
255
- self = str.__new__(cls, text)
256
- self.lang = lang
257
- self.tkey = tkey
258
- return self
259
-
260
-
261
- class PngInfo:
262
- """
263
- PNG chunk container (for use with save(pnginfo=))
264
-
265
- """
266
-
267
- def __init__(self):
268
- self.chunks = []
269
-
270
- def add(self, cid, data, after_idat=False):
271
- """Appends an arbitrary chunk. Use with caution.
272
-
273
- :param cid: a byte string, 4 bytes long.
274
- :param data: a byte string of the encoded data
275
- :param after_idat: for use with private chunks. Whether the chunk
276
- should be written after IDAT
277
-
278
- """
279
-
280
- chunk = [cid, data]
281
- if after_idat:
282
- chunk.append(True)
283
- self.chunks.append(tuple(chunk))
284
-
285
- def add_itxt(self, key, value, lang="", tkey="", zip=False):
286
- """Appends an iTXt chunk.
287
-
288
- :param key: latin-1 encodable text key name
289
- :param value: value for this key
290
- :param lang: language code
291
- :param tkey: UTF-8 version of the key name
292
- :param zip: compression flag
293
-
294
- """
295
-
296
- if not isinstance(key, bytes):
297
- key = key.encode("latin-1", "strict")
298
- if not isinstance(value, bytes):
299
- value = value.encode("utf-8", "strict")
300
- if not isinstance(lang, bytes):
301
- lang = lang.encode("utf-8", "strict")
302
- if not isinstance(tkey, bytes):
303
- tkey = tkey.encode("utf-8", "strict")
304
-
305
- if zip:
306
- self.add(
307
- b"iTXt",
308
- key + b"\0\x01\0" + lang + b"\0" + tkey + b"\0" + zlib.compress(value),
309
- )
310
- else:
311
- self.add(b"iTXt", key + b"\0\0\0" + lang + b"\0" + tkey + b"\0" + value)
312
-
313
- def add_text(self, key, value, zip=False):
314
- """Appends a text chunk.
315
-
316
- :param key: latin-1 encodable text key name
317
- :param value: value for this key, text or an
318
- :py:class:`PIL.PngImagePlugin.iTXt` instance
319
- :param zip: compression flag
320
-
321
- """
322
- if isinstance(value, iTXt):
323
- return self.add_itxt(key, value, value.lang, value.tkey, zip=zip)
324
-
325
- # The tEXt chunk stores latin-1 text
326
- if not isinstance(value, bytes):
327
- try:
328
- value = value.encode("latin-1", "strict")
329
- except UnicodeError:
330
- return self.add_itxt(key, value, zip=zip)
331
-
332
- if not isinstance(key, bytes):
333
- key = key.encode("latin-1", "strict")
334
-
335
- if zip:
336
- self.add(b"zTXt", key + b"\0\0" + zlib.compress(value))
337
- else:
338
- self.add(b"tEXt", key + b"\0" + value)
339
-
340
-
341
- # --------------------------------------------------------------------
342
- # PNG image stream (IHDR/IEND)
343
-
344
-
345
- class PngStream(ChunkStream):
346
- def __init__(self, fp):
347
- super().__init__(fp)
348
-
349
- # local copies of Image attributes
350
- self.im_info = {}
351
- self.im_text = {}
352
- self.im_size = (0, 0)
353
- self.im_mode = None
354
- self.im_tile = None
355
- self.im_palette = None
356
- self.im_custom_mimetype = None
357
- self.im_n_frames = None
358
- self._seq_num = None
359
- self.rewind_state = None
360
-
361
- self.text_memory = 0
362
-
363
- def check_text_memory(self, chunklen):
364
- self.text_memory += chunklen
365
- if self.text_memory > MAX_TEXT_MEMORY:
366
- msg = (
367
- "Too much memory used in text chunks: "
368
- f"{self.text_memory}>MAX_TEXT_MEMORY"
369
- )
370
- raise ValueError(msg)
371
-
372
- def save_rewind(self):
373
- self.rewind_state = {
374
- "info": self.im_info.copy(),
375
- "tile": self.im_tile,
376
- "seq_num": self._seq_num,
377
- }
378
-
379
- def rewind(self):
380
- self.im_info = self.rewind_state["info"]
381
- self.im_tile = self.rewind_state["tile"]
382
- self._seq_num = self.rewind_state["seq_num"]
383
-
384
- def chunk_iCCP(self, pos, length):
385
- # ICC profile
386
- s = ImageFile._safe_read(self.fp, length)
387
- # according to PNG spec, the iCCP chunk contains:
388
- # Profile name 1-79 bytes (character string)
389
- # Null separator 1 byte (null character)
390
- # Compression method 1 byte (0)
391
- # Compressed profile n bytes (zlib with deflate compression)
392
- i = s.find(b"\0")
393
- logger.debug("iCCP profile name %r", s[:i])
394
- logger.debug("Compression method %s", s[i])
395
- comp_method = s[i]
396
- if comp_method != 0:
397
- msg = f"Unknown compression method {comp_method} in iCCP chunk"
398
- raise SyntaxError(msg)
399
- try:
400
- icc_profile = _safe_zlib_decompress(s[i + 2 :])
401
- except ValueError:
402
- if ImageFile.LOAD_TRUNCATED_IMAGES:
403
- icc_profile = None
404
- else:
405
- raise
406
- except zlib.error:
407
- icc_profile = None # FIXME
408
- self.im_info["icc_profile"] = icc_profile
409
- return s
410
-
411
- def chunk_IHDR(self, pos, length):
412
- # image header
413
- s = ImageFile._safe_read(self.fp, length)
414
- if length < 13:
415
- if ImageFile.LOAD_TRUNCATED_IMAGES:
416
- return s
417
- msg = "Truncated IHDR chunk"
418
- raise ValueError(msg)
419
- self.im_size = i32(s, 0), i32(s, 4)
420
- try:
421
- self.im_mode, self.im_rawmode = _MODES[(s[8], s[9])]
422
- except Exception:
423
- pass
424
- if s[12]:
425
- self.im_info["interlace"] = 1
426
- if s[11]:
427
- msg = "unknown filter category"
428
- raise SyntaxError(msg)
429
- return s
430
-
431
- def chunk_IDAT(self, pos, length):
432
- # image data
433
- if "bbox" in self.im_info:
434
- tile = [("zip", self.im_info["bbox"], pos, self.im_rawmode)]
435
- else:
436
- if self.im_n_frames is not None:
437
- self.im_info["default_image"] = True
438
- tile = [("zip", (0, 0) + self.im_size, pos, self.im_rawmode)]
439
- self.im_tile = tile
440
- self.im_idat = length
441
- raise EOFError
442
-
443
- def chunk_IEND(self, pos, length):
444
- # end of PNG image
445
- raise EOFError
446
-
447
- def chunk_PLTE(self, pos, length):
448
- # palette
449
- s = ImageFile._safe_read(self.fp, length)
450
- if self.im_mode == "P":
451
- self.im_palette = "RGB", s
452
- return s
453
-
454
- def chunk_tRNS(self, pos, length):
455
- # transparency
456
- s = ImageFile._safe_read(self.fp, length)
457
- if self.im_mode == "P":
458
- if _simple_palette.match(s):
459
- # tRNS contains only one full-transparent entry,
460
- # other entries are full opaque
461
- i = s.find(b"\0")
462
- if i >= 0:
463
- self.im_info["transparency"] = i
464
- else:
465
- # otherwise, we have a byte string with one alpha value
466
- # for each palette entry
467
- self.im_info["transparency"] = s
468
- elif self.im_mode in ("1", "L", "I"):
469
- self.im_info["transparency"] = i16(s)
470
- elif self.im_mode == "RGB":
471
- self.im_info["transparency"] = i16(s), i16(s, 2), i16(s, 4)
472
- return s
473
-
474
- def chunk_gAMA(self, pos, length):
475
- # gamma setting
476
- s = ImageFile._safe_read(self.fp, length)
477
- self.im_info["gamma"] = i32(s) / 100000.0
478
- return s
479
-
480
- def chunk_cHRM(self, pos, length):
481
- # chromaticity, 8 unsigned ints, actual value is scaled by 100,000
482
- # WP x,y, Red x,y, Green x,y Blue x,y
483
-
484
- s = ImageFile._safe_read(self.fp, length)
485
- raw_vals = struct.unpack(">%dI" % (len(s) // 4), s)
486
- self.im_info["chromaticity"] = tuple(elt / 100000.0 for elt in raw_vals)
487
- return s
488
-
489
- def chunk_sRGB(self, pos, length):
490
- # srgb rendering intent, 1 byte
491
- # 0 perceptual
492
- # 1 relative colorimetric
493
- # 2 saturation
494
- # 3 absolute colorimetric
495
-
496
- s = ImageFile._safe_read(self.fp, length)
497
- if length < 1:
498
- if ImageFile.LOAD_TRUNCATED_IMAGES:
499
- return s
500
- msg = "Truncated sRGB chunk"
501
- raise ValueError(msg)
502
- self.im_info["srgb"] = s[0]
503
- return s
504
-
505
- def chunk_pHYs(self, pos, length):
506
- # pixels per unit
507
- s = ImageFile._safe_read(self.fp, length)
508
- if length < 9:
509
- if ImageFile.LOAD_TRUNCATED_IMAGES:
510
- return s
511
- msg = "Truncated pHYs chunk"
512
- raise ValueError(msg)
513
- px, py = i32(s, 0), i32(s, 4)
514
- unit = s[8]
515
- if unit == 1: # meter
516
- dpi = px * 0.0254, py * 0.0254
517
- self.im_info["dpi"] = dpi
518
- elif unit == 0:
519
- self.im_info["aspect"] = px, py
520
- return s
521
-
522
- def chunk_tEXt(self, pos, length):
523
- # text
524
- s = ImageFile._safe_read(self.fp, length)
525
- try:
526
- k, v = s.split(b"\0", 1)
527
- except ValueError:
528
- # fallback for broken tEXt tags
529
- k = s
530
- v = b""
531
- if k:
532
- k = k.decode("latin-1", "strict")
533
- v_str = v.decode("latin-1", "replace")
534
-
535
- self.im_info[k] = v if k == "exif" else v_str
536
- self.im_text[k] = v_str
537
- self.check_text_memory(len(v_str))
538
-
539
- return s
540
-
541
- def chunk_zTXt(self, pos, length):
542
- # compressed text
543
- s = ImageFile._safe_read(self.fp, length)
544
- try:
545
- k, v = s.split(b"\0", 1)
546
- except ValueError:
547
- k = s
548
- v = b""
549
- if v:
550
- comp_method = v[0]
551
- else:
552
- comp_method = 0
553
- if comp_method != 0:
554
- msg = f"Unknown compression method {comp_method} in zTXt chunk"
555
- raise SyntaxError(msg)
556
- try:
557
- v = _safe_zlib_decompress(v[1:])
558
- except ValueError:
559
- if ImageFile.LOAD_TRUNCATED_IMAGES:
560
- v = b""
561
- else:
562
- raise
563
- except zlib.error:
564
- v = b""
565
-
566
- if k:
567
- k = k.decode("latin-1", "strict")
568
- v = v.decode("latin-1", "replace")
569
-
570
- self.im_info[k] = self.im_text[k] = v
571
- self.check_text_memory(len(v))
572
-
573
- return s
574
-
575
- def chunk_iTXt(self, pos, length):
576
- # international text
577
- r = s = ImageFile._safe_read(self.fp, length)
578
- try:
579
- k, r = r.split(b"\0", 1)
580
- except ValueError:
581
- return s
582
- if len(r) < 2:
583
- return s
584
- cf, cm, r = r[0], r[1], r[2:]
585
- try:
586
- lang, tk, v = r.split(b"\0", 2)
587
- except ValueError:
588
- return s
589
- if cf != 0:
590
- if cm == 0:
591
- try:
592
- v = _safe_zlib_decompress(v)
593
- except ValueError:
594
- if ImageFile.LOAD_TRUNCATED_IMAGES:
595
- return s
596
- else:
597
- raise
598
- except zlib.error:
599
- return s
600
- else:
601
- return s
602
- try:
603
- k = k.decode("latin-1", "strict")
604
- lang = lang.decode("utf-8", "strict")
605
- tk = tk.decode("utf-8", "strict")
606
- v = v.decode("utf-8", "strict")
607
- except UnicodeError:
608
- return s
609
-
610
- self.im_info[k] = self.im_text[k] = iTXt(v, lang, tk)
611
- self.check_text_memory(len(v))
612
-
613
- return s
614
-
615
- def chunk_eXIf(self, pos, length):
616
- s = ImageFile._safe_read(self.fp, length)
617
- self.im_info["exif"] = b"Exif\x00\x00" + s
618
- return s
619
-
620
- # APNG chunks
621
- def chunk_acTL(self, pos, length):
622
- s = ImageFile._safe_read(self.fp, length)
623
- if length < 8:
624
- if ImageFile.LOAD_TRUNCATED_IMAGES:
625
- return s
626
- msg = "APNG contains truncated acTL chunk"
627
- raise ValueError(msg)
628
- if self.im_n_frames is not None:
629
- self.im_n_frames = None
630
- warnings.warn("Invalid APNG, will use default PNG image if possible")
631
- return s
632
- n_frames = i32(s)
633
- if n_frames == 0 or n_frames > 0x80000000:
634
- warnings.warn("Invalid APNG, will use default PNG image if possible")
635
- return s
636
- self.im_n_frames = n_frames
637
- self.im_info["loop"] = i32(s, 4)
638
- self.im_custom_mimetype = "image/apng"
639
- return s
640
-
641
- def chunk_fcTL(self, pos, length):
642
- s = ImageFile._safe_read(self.fp, length)
643
- if length < 26:
644
- if ImageFile.LOAD_TRUNCATED_IMAGES:
645
- return s
646
- msg = "APNG contains truncated fcTL chunk"
647
- raise ValueError(msg)
648
- seq = i32(s)
649
- if (self._seq_num is None and seq != 0) or (
650
- self._seq_num is not None and self._seq_num != seq - 1
651
- ):
652
- msg = "APNG contains frame sequence errors"
653
- raise SyntaxError(msg)
654
- self._seq_num = seq
655
- width, height = i32(s, 4), i32(s, 8)
656
- px, py = i32(s, 12), i32(s, 16)
657
- im_w, im_h = self.im_size
658
- if px + width > im_w or py + height > im_h:
659
- msg = "APNG contains invalid frames"
660
- raise SyntaxError(msg)
661
- self.im_info["bbox"] = (px, py, px + width, py + height)
662
- delay_num, delay_den = i16(s, 20), i16(s, 22)
663
- if delay_den == 0:
664
- delay_den = 100
665
- self.im_info["duration"] = float(delay_num) / float(delay_den) * 1000
666
- self.im_info["disposal"] = s[24]
667
- self.im_info["blend"] = s[25]
668
- return s
669
-
670
- def chunk_fdAT(self, pos, length):
671
- if length < 4:
672
- if ImageFile.LOAD_TRUNCATED_IMAGES:
673
- s = ImageFile._safe_read(self.fp, length)
674
- return s
675
- msg = "APNG contains truncated fDAT chunk"
676
- raise ValueError(msg)
677
- s = ImageFile._safe_read(self.fp, 4)
678
- seq = i32(s)
679
- if self._seq_num != seq - 1:
680
- msg = "APNG contains frame sequence errors"
681
- raise SyntaxError(msg)
682
- self._seq_num = seq
683
- return self.chunk_IDAT(pos + 4, length - 4)
684
-
685
-
686
- # --------------------------------------------------------------------
687
- # PNG reader
688
-
689
-
690
- def _accept(prefix):
691
- return prefix[:8] == _MAGIC
692
-
693
-
694
- ##
695
- # Image plugin for PNG images.
696
-
697
-
698
- class PngImageFile(ImageFile.ImageFile):
699
- format = "PNG"
700
- format_description = "Portable network graphics"
701
-
702
- def _open(self):
703
- if not _accept(self.fp.read(8)):
704
- msg = "not a PNG file"
705
- raise SyntaxError(msg)
706
- self._fp = self.fp
707
- self.__frame = 0
708
-
709
- #
710
- # Parse headers up to the first IDAT or fDAT chunk
711
-
712
- self.private_chunks = []
713
- self.png = PngStream(self.fp)
714
-
715
- while True:
716
- #
717
- # get next chunk
718
-
719
- cid, pos, length = self.png.read()
720
-
721
- try:
722
- s = self.png.call(cid, pos, length)
723
- except EOFError:
724
- break
725
- except AttributeError:
726
- logger.debug("%r %s %s (unknown)", cid, pos, length)
727
- s = ImageFile._safe_read(self.fp, length)
728
- if cid[1:2].islower():
729
- self.private_chunks.append((cid, s))
730
-
731
- self.png.crc(cid, s)
732
-
733
- #
734
- # Copy relevant attributes from the PngStream. An alternative
735
- # would be to let the PngStream class modify these attributes
736
- # directly, but that introduces circular references which are
737
- # difficult to break if things go wrong in the decoder...
738
- # (believe me, I've tried ;-)
739
-
740
- self.mode = self.png.im_mode
741
- self._size = self.png.im_size
742
- self.info = self.png.im_info
743
- self._text = None
744
- self.tile = self.png.im_tile
745
- self.custom_mimetype = self.png.im_custom_mimetype
746
- self.n_frames = self.png.im_n_frames or 1
747
- self.default_image = self.info.get("default_image", False)
748
-
749
- if self.png.im_palette:
750
- rawmode, data = self.png.im_palette
751
- self.palette = ImagePalette.raw(rawmode, data)
752
-
753
- if cid == b"fdAT":
754
- self.__prepare_idat = length - 4
755
- else:
756
- self.__prepare_idat = length # used by load_prepare()
757
-
758
- if self.png.im_n_frames is not None:
759
- self._close_exclusive_fp_after_loading = False
760
- self.png.save_rewind()
761
- self.__rewind_idat = self.__prepare_idat
762
- self.__rewind = self._fp.tell()
763
- if self.default_image:
764
- # IDAT chunk contains default image and not first animation frame
765
- self.n_frames += 1
766
- self._seek(0)
767
- self.is_animated = self.n_frames > 1
768
-
769
- @property
770
- def text(self):
771
- # experimental
772
- if self._text is None:
773
- # iTxt, tEXt and zTXt chunks may appear at the end of the file
774
- # So load the file to ensure that they are read
775
- if self.is_animated:
776
- frame = self.__frame
777
- # for APNG, seek to the final frame before loading
778
- self.seek(self.n_frames - 1)
779
- self.load()
780
- if self.is_animated:
781
- self.seek(frame)
782
- return self._text
783
-
784
- def verify(self):
785
- """Verify PNG file"""
786
-
787
- if self.fp is None:
788
- msg = "verify must be called directly after open"
789
- raise RuntimeError(msg)
790
-
791
- # back up to beginning of IDAT block
792
- self.fp.seek(self.tile[0][2] - 8)
793
-
794
- self.png.verify()
795
- self.png.close()
796
-
797
- if self._exclusive_fp:
798
- self.fp.close()
799
- self.fp = None
800
-
801
- def seek(self, frame):
802
- if not self._seek_check(frame):
803
- return
804
- if frame < self.__frame:
805
- self._seek(0, True)
806
-
807
- last_frame = self.__frame
808
- for f in range(self.__frame + 1, frame + 1):
809
- try:
810
- self._seek(f)
811
- except EOFError as e:
812
- self.seek(last_frame)
813
- msg = "no more images in APNG file"
814
- raise EOFError(msg) from e
815
-
816
- def _seek(self, frame, rewind=False):
817
- if frame == 0:
818
- if rewind:
819
- self._fp.seek(self.__rewind)
820
- self.png.rewind()
821
- self.__prepare_idat = self.__rewind_idat
822
- self.im = None
823
- if self.pyaccess:
824
- self.pyaccess = None
825
- self.info = self.png.im_info
826
- self.tile = self.png.im_tile
827
- self.fp = self._fp
828
- self._prev_im = None
829
- self.dispose = None
830
- self.default_image = self.info.get("default_image", False)
831
- self.dispose_op = self.info.get("disposal")
832
- self.blend_op = self.info.get("blend")
833
- self.dispose_extent = self.info.get("bbox")
834
- self.__frame = 0
835
- else:
836
- if frame != self.__frame + 1:
837
- msg = f"cannot seek to frame {frame}"
838
- raise ValueError(msg)
839
-
840
- # ensure previous frame was loaded
841
- self.load()
842
-
843
- if self.dispose:
844
- self.im.paste(self.dispose, self.dispose_extent)
845
- self._prev_im = self.im.copy()
846
-
847
- self.fp = self._fp
848
-
849
- # advance to the next frame
850
- if self.__prepare_idat:
851
- ImageFile._safe_read(self.fp, self.__prepare_idat)
852
- self.__prepare_idat = 0
853
- frame_start = False
854
- while True:
855
- self.fp.read(4) # CRC
856
-
857
- try:
858
- cid, pos, length = self.png.read()
859
- except (struct.error, SyntaxError):
860
- break
861
-
862
- if cid == b"IEND":
863
- msg = "No more images in APNG file"
864
- raise EOFError(msg)
865
- if cid == b"fcTL":
866
- if frame_start:
867
- # there must be at least one fdAT chunk between fcTL chunks
868
- msg = "APNG missing frame data"
869
- raise SyntaxError(msg)
870
- frame_start = True
871
-
872
- try:
873
- self.png.call(cid, pos, length)
874
- except UnicodeDecodeError:
875
- break
876
- except EOFError:
877
- if cid == b"fdAT":
878
- length -= 4
879
- if frame_start:
880
- self.__prepare_idat = length
881
- break
882
- ImageFile._safe_read(self.fp, length)
883
- except AttributeError:
884
- logger.debug("%r %s %s (unknown)", cid, pos, length)
885
- ImageFile._safe_read(self.fp, length)
886
-
887
- self.__frame = frame
888
- self.tile = self.png.im_tile
889
- self.dispose_op = self.info.get("disposal")
890
- self.blend_op = self.info.get("blend")
891
- self.dispose_extent = self.info.get("bbox")
892
-
893
- if not self.tile:
894
- raise EOFError
895
-
896
- # setup frame disposal (actual disposal done when needed in the next _seek())
897
- if self._prev_im is None and self.dispose_op == Disposal.OP_PREVIOUS:
898
- self.dispose_op = Disposal.OP_BACKGROUND
899
-
900
- if self.dispose_op == Disposal.OP_PREVIOUS:
901
- self.dispose = self._prev_im.copy()
902
- self.dispose = self._crop(self.dispose, self.dispose_extent)
903
- elif self.dispose_op == Disposal.OP_BACKGROUND:
904
- self.dispose = Image.core.fill(self.mode, self.size)
905
- self.dispose = self._crop(self.dispose, self.dispose_extent)
906
- else:
907
- self.dispose = None
908
-
909
- def tell(self):
910
- return self.__frame
911
-
912
- def load_prepare(self):
913
- """internal: prepare to read PNG file"""
914
-
915
- if self.info.get("interlace"):
916
- self.decoderconfig = self.decoderconfig + (1,)
917
-
918
- self.__idat = self.__prepare_idat # used by load_read()
919
- ImageFile.ImageFile.load_prepare(self)
920
-
921
- def load_read(self, read_bytes):
922
- """internal: read more image data"""
923
-
924
- while self.__idat == 0:
925
- # end of chunk, skip forward to next one
926
-
927
- self.fp.read(4) # CRC
928
-
929
- cid, pos, length = self.png.read()
930
-
931
- if cid not in [b"IDAT", b"DDAT", b"fdAT"]:
932
- self.png.push(cid, pos, length)
933
- return b""
934
-
935
- if cid == b"fdAT":
936
- try:
937
- self.png.call(cid, pos, length)
938
- except EOFError:
939
- pass
940
- self.__idat = length - 4 # sequence_num has already been read
941
- else:
942
- self.__idat = length # empty chunks are allowed
943
-
944
- # read more data from this chunk
945
- if read_bytes <= 0:
946
- read_bytes = self.__idat
947
- else:
948
- read_bytes = min(read_bytes, self.__idat)
949
-
950
- self.__idat = self.__idat - read_bytes
951
-
952
- return self.fp.read(read_bytes)
953
-
954
- def load_end(self):
955
- """internal: finished reading image data"""
956
- if self.__idat != 0:
957
- self.fp.read(self.__idat)
958
- while True:
959
- self.fp.read(4) # CRC
960
-
961
- try:
962
- cid, pos, length = self.png.read()
963
- except (struct.error, SyntaxError):
964
- break
965
-
966
- if cid == b"IEND":
967
- break
968
- elif cid == b"fcTL" and self.is_animated:
969
- # start of the next frame, stop reading
970
- self.__prepare_idat = 0
971
- self.png.push(cid, pos, length)
972
- break
973
-
974
- try:
975
- self.png.call(cid, pos, length)
976
- except UnicodeDecodeError:
977
- break
978
- except EOFError:
979
- if cid == b"fdAT":
980
- length -= 4
981
- ImageFile._safe_read(self.fp, length)
982
- except AttributeError:
983
- logger.debug("%r %s %s (unknown)", cid, pos, length)
984
- s = ImageFile._safe_read(self.fp, length)
985
- if cid[1:2].islower():
986
- self.private_chunks.append((cid, s, True))
987
- self._text = self.png.im_text
988
- if not self.is_animated:
989
- self.png.close()
990
- self.png = None
991
- else:
992
- if self._prev_im and self.blend_op == Blend.OP_OVER:
993
- updated = self._crop(self.im, self.dispose_extent)
994
- if self.im.mode == "RGB" and "transparency" in self.info:
995
- mask = updated.convert_transparent(
996
- "RGBA", self.info["transparency"]
997
- )
998
- else:
999
- mask = updated.convert("RGBA")
1000
- self._prev_im.paste(updated, self.dispose_extent, mask)
1001
- self.im = self._prev_im
1002
- if self.pyaccess:
1003
- self.pyaccess = None
1004
-
1005
- def _getexif(self):
1006
- if "exif" not in self.info:
1007
- self.load()
1008
- if "exif" not in self.info and "Raw profile type exif" not in self.info:
1009
- return None
1010
- return self.getexif()._get_merged_dict()
1011
-
1012
- def getexif(self):
1013
- if "exif" not in self.info:
1014
- self.load()
1015
-
1016
- return super().getexif()
1017
-
1018
- def getxmp(self):
1019
- """
1020
- Returns a dictionary containing the XMP tags.
1021
- Requires defusedxml to be installed.
1022
-
1023
- :returns: XMP tags in a dictionary.
1024
- """
1025
- return (
1026
- self._getxmp(self.info["XML:com.adobe.xmp"])
1027
- if "XML:com.adobe.xmp" in self.info
1028
- else {}
1029
- )
1030
-
1031
-
1032
- # --------------------------------------------------------------------
1033
- # PNG writer
1034
-
1035
- _OUTMODES = {
1036
- # supported PIL modes, and corresponding rawmodes/bits/color combinations
1037
- "1": ("1", b"\x01\x00"),
1038
- "L;1": ("L;1", b"\x01\x00"),
1039
- "L;2": ("L;2", b"\x02\x00"),
1040
- "L;4": ("L;4", b"\x04\x00"),
1041
- "L": ("L", b"\x08\x00"),
1042
- "LA": ("LA", b"\x08\x04"),
1043
- "I": ("I;16B", b"\x10\x00"),
1044
- "I;16": ("I;16B", b"\x10\x00"),
1045
- "P;1": ("P;1", b"\x01\x03"),
1046
- "P;2": ("P;2", b"\x02\x03"),
1047
- "P;4": ("P;4", b"\x04\x03"),
1048
- "P": ("P", b"\x08\x03"),
1049
- "RGB": ("RGB", b"\x08\x02"),
1050
- "RGBA": ("RGBA", b"\x08\x06"),
1051
- }
1052
-
1053
-
1054
- def putchunk(fp, cid, *data):
1055
- """Write a PNG chunk (including CRC field)"""
1056
-
1057
- data = b"".join(data)
1058
-
1059
- fp.write(o32(len(data)) + cid)
1060
- fp.write(data)
1061
- crc = _crc32(data, _crc32(cid))
1062
- fp.write(o32(crc))
1063
-
1064
-
1065
- class _idat:
1066
- # wrap output from the encoder in IDAT chunks
1067
-
1068
- def __init__(self, fp, chunk):
1069
- self.fp = fp
1070
- self.chunk = chunk
1071
-
1072
- def write(self, data):
1073
- self.chunk(self.fp, b"IDAT", data)
1074
-
1075
-
1076
- class _fdat:
1077
- # wrap encoder output in fdAT chunks
1078
-
1079
- def __init__(self, fp, chunk, seq_num):
1080
- self.fp = fp
1081
- self.chunk = chunk
1082
- self.seq_num = seq_num
1083
-
1084
- def write(self, data):
1085
- self.chunk(self.fp, b"fdAT", o32(self.seq_num), data)
1086
- self.seq_num += 1
1087
-
1088
-
1089
- def _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images):
1090
- duration = im.encoderinfo.get("duration", im.info.get("duration", 0))
1091
- loop = im.encoderinfo.get("loop", im.info.get("loop", 0))
1092
- disposal = im.encoderinfo.get("disposal", im.info.get("disposal", Disposal.OP_NONE))
1093
- blend = im.encoderinfo.get("blend", im.info.get("blend", Blend.OP_SOURCE))
1094
-
1095
- if default_image:
1096
- chain = itertools.chain(append_images)
1097
- else:
1098
- chain = itertools.chain([im], append_images)
1099
-
1100
- im_frames = []
1101
- frame_count = 0
1102
- for im_seq in chain:
1103
- for im_frame in ImageSequence.Iterator(im_seq):
1104
- if im_frame.mode == rawmode:
1105
- im_frame = im_frame.copy()
1106
- else:
1107
- if rawmode == "P":
1108
- im_frame = im_frame.convert(rawmode, palette=im.palette)
1109
- else:
1110
- im_frame = im_frame.convert(rawmode)
1111
- encoderinfo = im.encoderinfo.copy()
1112
- if isinstance(duration, (list, tuple)):
1113
- encoderinfo["duration"] = duration[frame_count]
1114
- if isinstance(disposal, (list, tuple)):
1115
- encoderinfo["disposal"] = disposal[frame_count]
1116
- if isinstance(blend, (list, tuple)):
1117
- encoderinfo["blend"] = blend[frame_count]
1118
- frame_count += 1
1119
-
1120
- if im_frames:
1121
- previous = im_frames[-1]
1122
- prev_disposal = previous["encoderinfo"].get("disposal")
1123
- prev_blend = previous["encoderinfo"].get("blend")
1124
- if prev_disposal == Disposal.OP_PREVIOUS and len(im_frames) < 2:
1125
- prev_disposal = Disposal.OP_BACKGROUND
1126
-
1127
- if prev_disposal == Disposal.OP_BACKGROUND:
1128
- base_im = previous["im"].copy()
1129
- dispose = Image.core.fill("RGBA", im.size, (0, 0, 0, 0))
1130
- bbox = previous["bbox"]
1131
- if bbox:
1132
- dispose = dispose.crop(bbox)
1133
- else:
1134
- bbox = (0, 0) + im.size
1135
- base_im.paste(dispose, bbox)
1136
- elif prev_disposal == Disposal.OP_PREVIOUS:
1137
- base_im = im_frames[-2]["im"]
1138
- else:
1139
- base_im = previous["im"]
1140
- delta = ImageChops.subtract_modulo(
1141
- im_frame.convert("RGBA"), base_im.convert("RGBA")
1142
- )
1143
- bbox = delta.getbbox(alpha_only=False)
1144
- if (
1145
- not bbox
1146
- and prev_disposal == encoderinfo.get("disposal")
1147
- and prev_blend == encoderinfo.get("blend")
1148
- ):
1149
- previous["encoderinfo"]["duration"] += encoderinfo.get(
1150
- "duration", duration
1151
- )
1152
- continue
1153
- else:
1154
- bbox = None
1155
- if "duration" not in encoderinfo:
1156
- encoderinfo["duration"] = duration
1157
- im_frames.append({"im": im_frame, "bbox": bbox, "encoderinfo": encoderinfo})
1158
-
1159
- # animation control
1160
- chunk(
1161
- fp,
1162
- b"acTL",
1163
- o32(len(im_frames)), # 0: num_frames
1164
- o32(loop), # 4: num_plays
1165
- )
1166
-
1167
- # default image IDAT (if it exists)
1168
- if default_image:
1169
- ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
1170
-
1171
- seq_num = 0
1172
- for frame, frame_data in enumerate(im_frames):
1173
- im_frame = frame_data["im"]
1174
- if not frame_data["bbox"]:
1175
- bbox = (0, 0) + im_frame.size
1176
- else:
1177
- bbox = frame_data["bbox"]
1178
- im_frame = im_frame.crop(bbox)
1179
- size = im_frame.size
1180
- encoderinfo = frame_data["encoderinfo"]
1181
- frame_duration = int(round(encoderinfo["duration"]))
1182
- frame_disposal = encoderinfo.get("disposal", disposal)
1183
- frame_blend = encoderinfo.get("blend", blend)
1184
- # frame control
1185
- chunk(
1186
- fp,
1187
- b"fcTL",
1188
- o32(seq_num), # sequence_number
1189
- o32(size[0]), # width
1190
- o32(size[1]), # height
1191
- o32(bbox[0]), # x_offset
1192
- o32(bbox[1]), # y_offset
1193
- o16(frame_duration), # delay_numerator
1194
- o16(1000), # delay_denominator
1195
- o8(frame_disposal), # dispose_op
1196
- o8(frame_blend), # blend_op
1197
- )
1198
- seq_num += 1
1199
- # frame data
1200
- if frame == 0 and not default_image:
1201
- # first frame must be in IDAT chunks for backwards compatibility
1202
- ImageFile._save(
1203
- im_frame,
1204
- _idat(fp, chunk),
1205
- [("zip", (0, 0) + im_frame.size, 0, rawmode)],
1206
- )
1207
- else:
1208
- fdat_chunks = _fdat(fp, chunk, seq_num)
1209
- ImageFile._save(
1210
- im_frame,
1211
- fdat_chunks,
1212
- [("zip", (0, 0) + im_frame.size, 0, rawmode)],
1213
- )
1214
- seq_num = fdat_chunks.seq_num
1215
-
1216
-
1217
- def _save_all(im, fp, filename):
1218
- _save(im, fp, filename, save_all=True)
1219
-
1220
-
1221
- def _save(im, fp, filename, chunk=putchunk, save_all=False):
1222
- # save an image to disk (called by the save method)
1223
-
1224
- if save_all:
1225
- default_image = im.encoderinfo.get(
1226
- "default_image", im.info.get("default_image")
1227
- )
1228
- modes = set()
1229
- append_images = im.encoderinfo.get("append_images", [])
1230
- if default_image:
1231
- chain = itertools.chain(append_images)
1232
- else:
1233
- chain = itertools.chain([im], append_images)
1234
- for im_seq in chain:
1235
- for im_frame in ImageSequence.Iterator(im_seq):
1236
- modes.add(im_frame.mode)
1237
- for mode in ("RGBA", "RGB", "P"):
1238
- if mode in modes:
1239
- break
1240
- else:
1241
- mode = modes.pop()
1242
- else:
1243
- mode = im.mode
1244
-
1245
- if mode == "P":
1246
- #
1247
- # attempt to minimize storage requirements for palette images
1248
- if "bits" in im.encoderinfo:
1249
- # number of bits specified by user
1250
- colors = min(1 << im.encoderinfo["bits"], 256)
1251
- else:
1252
- # check palette contents
1253
- if im.palette:
1254
- colors = max(min(len(im.palette.getdata()[1]) // 3, 256), 1)
1255
- else:
1256
- colors = 256
1257
-
1258
- if colors <= 16:
1259
- if colors <= 2:
1260
- bits = 1
1261
- elif colors <= 4:
1262
- bits = 2
1263
- else:
1264
- bits = 4
1265
- mode = f"{mode};{bits}"
1266
-
1267
- # encoder options
1268
- im.encoderconfig = (
1269
- im.encoderinfo.get("optimize", False),
1270
- im.encoderinfo.get("compress_level", -1),
1271
- im.encoderinfo.get("compress_type", -1),
1272
- im.encoderinfo.get("dictionary", b""),
1273
- )
1274
-
1275
- # get the corresponding PNG mode
1276
- try:
1277
- rawmode, mode = _OUTMODES[mode]
1278
- except KeyError as e:
1279
- msg = f"cannot write mode {mode} as PNG"
1280
- raise OSError(msg) from e
1281
-
1282
- #
1283
- # write minimal PNG file
1284
-
1285
- fp.write(_MAGIC)
1286
-
1287
- chunk(
1288
- fp,
1289
- b"IHDR",
1290
- o32(im.size[0]), # 0: size
1291
- o32(im.size[1]),
1292
- mode, # 8: depth/type
1293
- b"\0", # 10: compression
1294
- b"\0", # 11: filter category
1295
- b"\0", # 12: interlace flag
1296
- )
1297
-
1298
- chunks = [b"cHRM", b"gAMA", b"sBIT", b"sRGB", b"tIME"]
1299
-
1300
- icc = im.encoderinfo.get("icc_profile", im.info.get("icc_profile"))
1301
- if icc:
1302
- # ICC profile
1303
- # according to PNG spec, the iCCP chunk contains:
1304
- # Profile name 1-79 bytes (character string)
1305
- # Null separator 1 byte (null character)
1306
- # Compression method 1 byte (0)
1307
- # Compressed profile n bytes (zlib with deflate compression)
1308
- name = b"ICC Profile"
1309
- data = name + b"\0\0" + zlib.compress(icc)
1310
- chunk(fp, b"iCCP", data)
1311
-
1312
- # You must either have sRGB or iCCP.
1313
- # Disallow sRGB chunks when an iCCP-chunk has been emitted.
1314
- chunks.remove(b"sRGB")
1315
-
1316
- info = im.encoderinfo.get("pnginfo")
1317
- if info:
1318
- chunks_multiple_allowed = [b"sPLT", b"iTXt", b"tEXt", b"zTXt"]
1319
- for info_chunk in info.chunks:
1320
- cid, data = info_chunk[:2]
1321
- if cid in chunks:
1322
- chunks.remove(cid)
1323
- chunk(fp, cid, data)
1324
- elif cid in chunks_multiple_allowed:
1325
- chunk(fp, cid, data)
1326
- elif cid[1:2].islower():
1327
- # Private chunk
1328
- after_idat = info_chunk[2:3]
1329
- if not after_idat:
1330
- chunk(fp, cid, data)
1331
-
1332
- if im.mode == "P":
1333
- palette_byte_number = colors * 3
1334
- palette_bytes = im.im.getpalette("RGB")[:palette_byte_number]
1335
- while len(palette_bytes) < palette_byte_number:
1336
- palette_bytes += b"\0"
1337
- chunk(fp, b"PLTE", palette_bytes)
1338
-
1339
- transparency = im.encoderinfo.get("transparency", im.info.get("transparency", None))
1340
-
1341
- if transparency or transparency == 0:
1342
- if im.mode == "P":
1343
- # limit to actual palette size
1344
- alpha_bytes = colors
1345
- if isinstance(transparency, bytes):
1346
- chunk(fp, b"tRNS", transparency[:alpha_bytes])
1347
- else:
1348
- transparency = max(0, min(255, transparency))
1349
- alpha = b"\xFF" * transparency + b"\0"
1350
- chunk(fp, b"tRNS", alpha[:alpha_bytes])
1351
- elif im.mode in ("1", "L", "I"):
1352
- transparency = max(0, min(65535, transparency))
1353
- chunk(fp, b"tRNS", o16(transparency))
1354
- elif im.mode == "RGB":
1355
- red, green, blue = transparency
1356
- chunk(fp, b"tRNS", o16(red) + o16(green) + o16(blue))
1357
- else:
1358
- if "transparency" in im.encoderinfo:
1359
- # don't bother with transparency if it's an RGBA
1360
- # and it's in the info dict. It's probably just stale.
1361
- msg = "cannot use transparency for this mode"
1362
- raise OSError(msg)
1363
- else:
1364
- if im.mode == "P" and im.im.getpalettemode() == "RGBA":
1365
- alpha = im.im.getpalette("RGBA", "A")
1366
- alpha_bytes = colors
1367
- chunk(fp, b"tRNS", alpha[:alpha_bytes])
1368
-
1369
- dpi = im.encoderinfo.get("dpi")
1370
- if dpi:
1371
- chunk(
1372
- fp,
1373
- b"pHYs",
1374
- o32(int(dpi[0] / 0.0254 + 0.5)),
1375
- o32(int(dpi[1] / 0.0254 + 0.5)),
1376
- b"\x01",
1377
- )
1378
-
1379
- if info:
1380
- chunks = [b"bKGD", b"hIST"]
1381
- for info_chunk in info.chunks:
1382
- cid, data = info_chunk[:2]
1383
- if cid in chunks:
1384
- chunks.remove(cid)
1385
- chunk(fp, cid, data)
1386
-
1387
- exif = im.encoderinfo.get("exif")
1388
- if exif:
1389
- if isinstance(exif, Image.Exif):
1390
- exif = exif.tobytes(8)
1391
- if exif.startswith(b"Exif\x00\x00"):
1392
- exif = exif[6:]
1393
- chunk(fp, b"eXIf", exif)
1394
-
1395
- if save_all:
1396
- _write_multiple_frames(im, fp, chunk, rawmode, default_image, append_images)
1397
- else:
1398
- ImageFile._save(im, _idat(fp, chunk), [("zip", (0, 0) + im.size, 0, rawmode)])
1399
-
1400
- if info:
1401
- for info_chunk in info.chunks:
1402
- cid, data = info_chunk[:2]
1403
- if cid[1:2].islower():
1404
- # Private chunk
1405
- after_idat = info_chunk[2:3]
1406
- if after_idat:
1407
- chunk(fp, cid, data)
1408
-
1409
- chunk(fp, b"IEND", b"")
1410
-
1411
- if hasattr(fp, "flush"):
1412
- fp.flush()
1413
-
1414
-
1415
- # --------------------------------------------------------------------
1416
- # PNG chunk converter
1417
-
1418
-
1419
- def getchunks(im, **params):
1420
- """Return a list of PNG chunks representing this image."""
1421
-
1422
- class collector:
1423
- data = []
1424
-
1425
- def write(self, data):
1426
- pass
1427
-
1428
- def append(self, chunk):
1429
- self.data.append(chunk)
1430
-
1431
- def append(fp, cid, *data):
1432
- data = b"".join(data)
1433
- crc = o32(_crc32(data, _crc32(cid)))
1434
- fp.append((cid, data, crc))
1435
-
1436
- fp = collector()
1437
-
1438
- try:
1439
- im.encoderinfo = params
1440
- _save(im, fp, None, append)
1441
- finally:
1442
- del im.encoderinfo
1443
-
1444
- return fp.data
1445
-
1446
-
1447
- # --------------------------------------------------------------------
1448
- # Registry
1449
-
1450
- Image.register_open(PngImageFile.format, PngImageFile, _accept)
1451
- Image.register_save(PngImageFile.format, _save)
1452
- Image.register_save_all(PngImageFile.format, _save_all)
1453
-
1454
- Image.register_extensions(PngImageFile.format, [".png", ".apng"])
1455
-
1456
- Image.register_mime(PngImageFile.format, "image/png")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/huggingface_hub/utils/_git_credential.py DELETED
@@ -1,96 +0,0 @@
1
- # coding=utf-8
2
- # Copyright 2022-present, the HuggingFace Inc. team.
3
- #
4
- # Licensed under the Apache License, Version 2.0 (the "License");
5
- # you may not use this file except in compliance with the License.
6
- # You may obtain a copy of the License at
7
- #
8
- # http://www.apache.org/licenses/LICENSE-2.0
9
- #
10
- # Unless required by applicable law or agreed to in writing, software
11
- # distributed under the License is distributed on an "AS IS" BASIS,
12
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
- # See the License for the specific language governing permissions and
14
- # limitations under the License.
15
- """Contains utilities to manage Git credentials."""
16
- import subprocess
17
- from typing import List, Optional
18
-
19
- from ..constants import ENDPOINT
20
- from ._subprocess import run_interactive_subprocess, run_subprocess
21
-
22
-
23
- def list_credential_helpers(folder: Optional[str] = None) -> List[str]:
24
- """Return the list of git credential helpers configured.
25
-
26
- See https://git-scm.com/docs/gitcredentials.
27
-
28
- Credentials are saved in all configured helpers (store, cache, macOS keychain,...).
29
- Calls "`git credential approve`" internally. See https://git-scm.com/docs/git-credential.
30
-
31
- Args:
32
- folder (`str`, *optional*):
33
- The folder in which to check the configured helpers.
34
- """
35
- try:
36
- output = run_subprocess("git config --list", folder=folder).stdout
37
- # NOTE: If user has set an helper for a custom URL, it will not we caught here.
38
- # Example: `credential.https://huggingface.co.helper=store`
39
- # See: https://github.com/huggingface/huggingface_hub/pull/1138#discussion_r1013324508
40
- return sorted( # Sort for nice printing
41
- { # Might have some duplicates
42
- line.split("=")[-1].split()[0] for line in output.split("\n") if "credential.helper" in line
43
- }
44
- )
45
- except subprocess.CalledProcessError as exc:
46
- raise EnvironmentError(exc.stderr)
47
-
48
-
49
- def set_git_credential(token: str, username: str = "hf_user", folder: Optional[str] = None) -> None:
50
- """Save a username/token pair in git credential for HF Hub registry.
51
-
52
- Credentials are saved in all configured helpers (store, cache, macOS keychain,...).
53
- Calls "`git credential approve`" internally. See https://git-scm.com/docs/git-credential.
54
-
55
- Args:
56
- username (`str`, defaults to `"hf_user"`):
57
- A git username. Defaults to `"hf_user"`, the default user used in the Hub.
58
- token (`str`, defaults to `"hf_user"`):
59
- A git password. In practice, the User Access Token for the Hub.
60
- See https://huggingface.co/settings/tokens.
61
- folder (`str`, *optional*):
62
- The folder in which to check the configured helpers.
63
- """
64
- with run_interactive_subprocess("git credential approve", folder=folder) as (
65
- stdin,
66
- _,
67
- ):
68
- stdin.write(f"url={ENDPOINT}\nusername={username.lower()}\npassword={token}\n\n")
69
- stdin.flush()
70
-
71
-
72
- def unset_git_credential(username: str = "hf_user", folder: Optional[str] = None) -> None:
73
- """Erase credentials from git credential for HF Hub registry.
74
-
75
- Credentials are erased from the configured helpers (store, cache, macOS
76
- keychain,...), if any. If `username` is not provided, any credential configured for
77
- HF Hub endpoint is erased.
78
- Calls "`git credential erase`" internally. See https://git-scm.com/docs/git-credential.
79
-
80
- Args:
81
- username (`str`, defaults to `"hf_user"`):
82
- A git username. Defaults to `"hf_user"`, the default user used in the Hub.
83
- folder (`str`, *optional*):
84
- The folder in which to check the configured helpers.
85
- """
86
- with run_interactive_subprocess("git credential reject", folder=folder) as (
87
- stdin,
88
- _,
89
- ):
90
- standard_input = f"url={ENDPOINT}\n"
91
- if username is not None:
92
- standard_input += f"username={username.lower()}\n"
93
- standard_input += "\n"
94
-
95
- stdin.write(standard_input)
96
- stdin.flush()