Commit 1896713 · Parent(s): 3324954
Update parquet files (step 82 of 397)
This view is limited to 50 files because it contains too many changes.
- spaces/101-5/gpt4free/g4f/Provider/Providers/Theb.py +0 -28
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/Armi Project Cairo International Airport Heca Fs2004 _BEST_.md +0 -123
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/Ezycracks.com How to Crack Any Software in Minutes.md +0 -28
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/Free Download Windows Mobile 7 Samsung Omnia i900 Get Ready for a Faster and Smoother Experience.md +0 -175
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/Free LINK Winzip Full Version Download.md +0 -19
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/GTA 5 Key How to Access the Most Epic Game Ever.md +0 -14
- spaces/1gistliPinn/ChatGPT4/Examples/Commodore 64 Roms Pack !!LINK!! Download.md +0 -6
- spaces/1gistliPinn/ChatGPT4/Examples/Download Script Frost Dragon Okolnir Elfbot WORK.md +0 -6
- spaces/1gistliPinn/ChatGPT4/Examples/Free Download Myob Accounting Versi 17 Full 32 Fixed.md +0 -6
- spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Attack on Titan 2 Final Battle - The Ultimate Challenge for Fans of the Anime.md +0 -129
- spaces/2023Liu2023/bingo/src/pages/api/blob.ts +0 -40
- spaces/AIGC-Audio/AudioGPT/NeuralSeq/tasks/tts/ps_adv.py +0 -372
- spaces/AchyuthGamer/OpenGPT-Chat-UI/src/lib/utils/share.ts +0 -7
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/ninepatch.js +0 -2
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/fileselectorbutton/FileSelectorButton.d.ts +0 -45
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/pages/Pages.d.ts +0 -72
- spaces/Al-Chan/Vits_League_of_Legends_Yuumi_TTS/text/english.py +0 -188
- spaces/AlekseyKorshuk/michellejieli-NSFW_text_classifier/README.md +0 -12
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/docs/source/en/api/pipelines/ddpm.md +0 -35
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/dependency_versions_check.py +0 -47
- spaces/Andy1621/uniformer_image_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py +0 -27
- spaces/Andy1621/uniformer_image_detection/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py +0 -68
- spaces/Andy1621/uniformer_image_detection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py +0 -45
- spaces/Andy1621/uniformer_image_detection/configs/yolact/yolact_r50_1x8_coco.py +0 -160
- spaces/Andy1621/uniformer_image_segmentation/configs/deeplabv3plus/deeplabv3plus_r101-d8_480x480_40k_pascal_context.py +0 -2
- spaces/Andy1621/uniformer_image_segmentation/configs/psanet/psanet_r101-d8_769x769_80k_cityscapes.py +0 -2
- spaces/Andy1621/uniformer_image_segmentation/configs/psanet/psanet_r50-d8_512x1024_40k_cityscapes.py +0 -4
- spaces/Andy1621/uniformer_image_segmentation/configs/pspnet/pspnet_r50-d8_480x480_40k_pascal_context_59.py +0 -10
- spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/multimodal/multimodal_embedder.py +0 -178
- spaces/Anonymous-sub/Rerender/gmflow_module/gmflow/__init__.py +0 -0
- spaces/Arnx/MusicGenXvAKN/audiocraft/quantization/__init__.py +0 -9
- spaces/Arnx/MusicGenXvAKN/tests/models/test_musicgen.py +0 -58
- spaces/Artrajz/vits-simple-api/bert_vits2/text/bert_handler.py +0 -33
- spaces/Atualli/yoloxTeste/configs/yolox_m.py +0 -15
- spaces/Ayushnangia/Whispercpp_yt/README.md +0 -13
- spaces/Banbri/zcvzcv/src/components/ui/separator.tsx +0 -31
- spaces/Bart92/RVC_HF/lib/uvr5_pack/lib_v5/nets_537227KB.py +0 -123
- spaces/BernardoOlisan/vqganclip/CLIP/data/yfcc100m.md +0 -14
- spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_internal/locations/base.py +0 -81
- spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/highlighter.py +0 -232
- spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/command/install.py +0 -814
- spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/fancy_getopt.py +0 -470
- spaces/BigChia/bird_classifier/app.py +0 -26
- spaces/BigSalmon/BackTranslation2/app.py +0 -117
- spaces/Brightmzb/test/README.md +0 -13
- spaces/CVPR/LIVE/thrust/dependencies/cub/test/Makefile +0 -468
- spaces/CVPR/LIVE/thrust/thrust/detail/cpp14_required.h +0 -26
- spaces/CVPR/drawings-to-human/static/_app/immutable/assets/pages/__layout.svelte-cc9dd261.css +0 -1
- spaces/CVPR/lama-example/saicinpainting/training/modules/pix2pixhd.py +0 -669
- spaces/CVPR/regionclip-demo/datasets/prepare_panoptic_fpn.py +0 -116
spaces/101-5/gpt4free/g4f/Provider/Providers/Theb.py
DELETED
@@ -1,28 +0,0 @@
import os
import json
import time
import subprocess

from ...typing import sha256, Dict, get_type_hints

url = 'https://theb.ai'
model = ['gpt-3.5-turbo']
supports_stream = True
needs_auth = False

def _create_completion(model: str, messages: list, stream: bool, **kwargs):

    path = os.path.dirname(os.path.realpath(__file__))
    config = json.dumps({
        'messages': messages,
        'model': model}, separators=(',', ':'))

    cmd = ['python3', f'{path}/helpers/theb.py', config]

    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)

    for line in iter(p.stdout.readline, b''):
        yield line.decode('utf-8')

params = f'g4f.Providers.{os.path.basename(__file__)[:-3]} supports: ' + \
    '(%s)' % ', '.join([f"{name}: {get_type_hints(_create_completion)[name].__name__}" for name in _create_completion.__code__.co_varnames[:_create_completion.__code__.co_argcount]])
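For context, a minimal sketch of how a g4f-style provider like this one might be driven (this is an assumption about the surrounding g4f interface, not code from this commit; the message content is hypothetical):

    # Hypothetical caller: _create_completion is a generator that launches the
    # helpers/theb.py subprocess and yields each decoded stdout line as it
    # streams back, so iterating the generator consumes the streamed response.
    messages = [{'role': 'user', 'content': 'Hello'}]  # hypothetical prompt
    for chunk in _create_completion(model='gpt-3.5-turbo', messages=messages, stream=True):
        print(chunk, end='')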
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Armi Project Cairo International Airport Heca Fs2004 _BEST_.md
DELETED
@@ -1,123 +0,0 @@

<h1>Armi Project Cairo International Airport HECA FS2004 Review</h1>
<h2>Introduction</h2>
<p>If you are looking for a realistic and detailed rendition of Cairo International Airport (IATA: CAI, ICAO: HECA) for FS2004, you might want to check out the Armi Project scenery. In this review, we will take a look at the features, performance and compatibility of this addon, and see if it is worth adding to your virtual hangar.</p>
<h2>armi project cairo international airport heca fs2004</h2><br /><p><b><b>Download File</b> ——— <a href="https://byltly.com/2uKyLi">https://byltly.com/2uKyLi</a></b></p><br /><br />
<h3>What is Armi Project?</h3>
<p>Armi Project is a scenery developer that specializes in creating airports for FS2004 and FSX. They have released several sceneries for Middle Eastern and Asian airports, such as Baghdad, Tehran, Kuwait, Riyadh, Muscat, Amman and Bangkok. Their sceneries are known for their accuracy, detail and realism.</p>
<h3>What is Cairo International Airport HECA?</h3>
<p>Cairo International Airport is the international airport of Cairo and the busiest in Egypt. It serves as the primary hub for EgyptAir and EgyptAir Express as well as several other airlines. The airport is located to the northeast of the city around 15 kilometres (9.3 mi) from the business area of the city and has an area of approximately 37 square kilometres (14 sq mi). The terminal facilities include Departure Hall 1, International Hall 3, and Hall 4 for private and non-commercial aircraft services. As part of the recent upgrading and facility improvement scheme, the CAA demolished the old hall 3, previously used for domestic arrivals and departures, to reconstruct a new hall to be used for international arrivals. Terminal 1 is locally known as the "Old Airport," although its facilities were recently given a complete overhaul and are newer than those of Terminal 2, which is still known as the "New Airport."</p>
<h2>Features of the scenery</h2>
<h3>Gmax models of main buildings</h3>
<p>The Armi Project scenery features Gmax models of all main buildings at Cairo International Airport, such as the terminal, control tower, cargo and military hangars and more. The models are accurate and detailed, with realistic textures and shadows. The jetways are also modeled with Gmax and can be moved with CTRL+J.</p>
<h3>Detailed jetways and bridges</h3>
<p>The scenery also features detailed jetways and bridges that connect the concourses to the terminal. The jetways have photo real textures and custom animations. The bridges have transparent windows that allow you to see inside them.</p>
<h3>Photo real texture and custom ground texture</h3>
<p>The ground texture of the scenery is based on satellite photos that give you a realistic feeling of being at the airport. The texture is custom made with lines, taxiways and taxi lights. The aprons have realistic markings and signs that help you navigate around the airport.</p>
<h3>Animated skytrain and static objects</h3>
<p>The scenery also features an animated skytrain that runs behind the concourses. The skytrain has realistic sounds and movements that add life to the airport. The scenery also has static objects such as local ground service equipment, cars and planes that populate the airport.</p>
<h3>Surrounding area and landmarks</h3>
<p>The scenery also covers the surrounding area of the airport, including military hangars, fuel tank facilities, VIP terminal, royal terminal, cargo bay and more. The scenery also includes some landmarks near the airport such as a major hotel (the Sheraton), a mosque and a pyramid.</p>
<h2>Performance and compatibility</h2>
<h3>System requirements</h3>
<p>The system requirements for this scenery are: <ul>
<li>FS2004</li>
<li>Windows 7, Windows 8 or 8.1 (recommended)</li>
<li>Core i7 2.53 Ghz</li>
<li>RAM 2GB</li>
<li>Graphic card 512 Mb to 1GB</li>
</ul>
</p>
<p>armi project heca scenery for fs2004<br />
cairo international airport fs2004 download<br />
armi project cairo airport review<br />
heca airport code fs2004<br />
armi project fs2004 airports<br />
cairo international airport scenery fs9<br />
armi project heca fsx<br />
fs2004 cairo airport update<br />
armi project egypt airports fs2004<br />
heca airport charts fs2004<br />
armi project cairo international v2 fs2004<br />
fs2004 cairo photoreal scenery<br />
armi project heca p3d<br />
fs2004 cairo airport freeware<br />
armi project fs9 scenery<br />
cairo international airport terminal 2 fs2004<br />
armi project heca x-plane<br />
fs2004 cairo airport traffic<br />
armi project egypt fs2004<br />
heca airport map fs2004<br />
armi project cairo international v3 fs2004<br />
fs2004 cairo mesh scenery<br />
armi project heca afcad<br />
fs2004 cairo airport lights<br />
armi project fsx scenery<br />
cairo international airport terminal 3 fs2004<br />
armi project heca crack<br />
fs2004 cairo airport weather<br />
armi project egyptian airports pack fs2004<br />
heca airport elevation fs2004<br />
armi project cairo international v1 fs2004<br />
fs2004 cairo landmarks scenery<br />
armi project heca patch<br />
fs2004 cairo airport runway length<br />
armi project p3d scenery<br />
cairo international airport terminal 1 fs2004<br />
armi project heca manual<br />
fs2004 cairo airport ils frequency<br />
armi project egyptian airports bundle fs2004<br />
heca airport name fs2004<br />
armi project cairo international v4 fs2004<br />
fs2004 cairo vector scenery<br />
armi project heca update<br />
fs2004 cairo airport taxiway signs<br />
armi project x-plane scenery<br />
cairo international airport terminal 5 fs2004<br />
armi project heca serial number<br />
fs2004 cairo airport atis frequency<br />
armi project egyptian airports collection fs2004<br />
heca airport location fs2004</p>
<h3>Frame rate and VAS usage</h3>
<p>The frame rate of this scenery is very good considering the amount of detail and objects it has. The VAS usage is also reasonable and does not cause any out-of-memory errors. However, you might want to adjust your settings according to your system specifications to get the best performance.</p>
<h3>Compatibility with other addons</h3>
<p>The scenery is compatible with most addons that enhance FS2004, such as mesh, landclass, weather, traffic etc. However, you might need to disable some conflicting files or adjust some settings to avoid any issues.</p>
<h2>Conclusion</h2>
<h3>Pros and cons</h3>
<p>The pros of this scenery are: <ul>
<li>Realistic and detailed Gmax models of main buildings</li>
<li>Detailed jetways and bridges with animations</li>
<li>Photo real texture and custom ground texture</li>
<li>Animated skytrain and static objects</li>
<li>Surrounding area and landmarks</li>
<li>Good frame rate and VAS usage</li>
<li>Compatibility with other addons</li>
</ul>
</p>
<p>The cons of this scenery are: <ul>
<li>Lack of dynamic lighting or night effects (FS2004 limitation)</li>
<li>Lack of seasonal variations or weather effects (FS2004 limitation)</li>
<li>Lack of SODE or AES support for jetways (FS2004 limitation)</li>
<li>Lack of terminal interior or passengers (FS2004 limitation)</li>
<li>Lack of AI traffic or ATC (addon dependent)</li>
<li>Lack of documentation or manual (addon dependent)</li>
</ul>
</p>
<h3>Rating and recommendation</h3>
<p>I would rate this scenery 4 out of 5 stars. It is a very good representation of Cairo International Airport for FS2004 that offers a lot of features, realism and detail. It is also well optimized for performance and compatibility. However, it also suffers from some limitations that are inherent to FS2004 itself. Therefore, I would recommend this scenery to anyone who still uses FS2004 and wants to fly to or from Cairo International Airport.</p>
<table border="1">
<tr><td><b>Feature</b></td><td><b>Rating (out of 5)</b></td></tr>
<tr><td>Gmax models of main buildings</td><td>5</td></tr>
<tr><td>Detailed jetways and bridges</td><td>5</td></tr>
<tr><td>Photo real texture and custom ground texture</td><td>5</td></tr>
<tr><td>Animated skytrain and static objects</td><td>5</td></tr>
<tr><td>Surrounding area and landmarks</td><td>5</td></tr>
<tr><td>Performance</td><td>4</td></tr>
<tr><td>Compatibility</td><td>4</td></tr>
<tr><td>Total</td><td>33/40 = 82.5%</td></tr>
<tr><td>Average</td><td>4/5 = 80%</td></tr>
<tr><td>Rounded</td><td><b>4 stars ⭐⭐⭐⭐</b></td></tr>
</table>
FAQs: Q: Where can I buy this scenery? A: You can buy this scenery from simMarket.com for €17. Q: How do I install this scenery? A: You can install this scenery by running the setup.exe file that comes with the download. Q: How do I activate this scenery? A: You can activate this scenery by entering your email address and serial number that you received after purchasing. Q: How do I uninstall this scenery? A: You can uninstall this scenery by running the uninstall.exe file that comes with the download. Q: How do I contact Armi Project for support or feedback? A: You can contact Armi Project by sending an email to [email protected]. </p> 0a6ba089eb<br />
<br />
<br />
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Ezycracks.com How to Crack Any Software in Minutes.md
DELETED
@@ -1,28 +0,0 @@

```html
<h1>How to Find the Best Software Cracks on Ezycracks.com</h1>
<p>If you are looking for a way to use premium software without paying for a license, you might be interested in ezycracks.com. This website offers a huge collection of software cracks, patches, keygens, and serial keys for various applications and games. You can download them for free and enjoy the full features of your favorite software.</p>
<p>However, not all software cracks are created equal. Some of them might be outdated, infected with malware, or not compatible with your system. That's why you need to be careful when choosing a software crack from ezycracks.com. Here are some tips to help you find the best software cracks on this website.</p>
<h2>ezycracks.com</h2><br /><p><b><b>Download</b> ✔ <a href="https://byltly.com/2uKxEa">https://byltly.com/2uKxEa</a></b></p><br /><br />
<ul>
<li>Check the comments section. Before downloading a software crack from ezycracks.com, you should always read the comments section below the post. There you can find feedback from other users who have tried the crack. You can see if the crack works, if it has any problems, or if it contains any viruses. You can also ask questions or request help from other users if you encounter any issues.</li>
<li>Use a reliable antivirus program. Even if the comments section says that the crack is safe, you should still scan it with a reputable antivirus program before running it. This will protect your computer from any potential threats that might be hidden in the crack file. You should also avoid opening any suspicious attachments or links that come with the crack.</li>
<li>Backup your data. Sometimes, a software crack might cause unexpected errors or conflicts with your system. This could result in data loss or corruption. To prevent this, you should always backup your important files and folders before installing or using a software crack from ezycracks.com. You can use an external hard drive, a cloud service, or a backup software to do this.</li>
<li>Follow the instructions. Most software cracks from ezycracks.com come with a readme file or a video tutorial that explains how to use them. You should follow these instructions carefully to ensure that the crack works properly and does not cause any problems. You should also pay attention to any warnings or precautions that are mentioned in the instructions.</li>
</ul>
<p>By following these tips, you can find the best software cracks on ezycracks.com and enjoy using premium software for free. However, you should also be aware of the risks and legal issues that come with using software cracks. Software piracy is illegal and unethical, and it can harm the developers and the industry. You should always support the original creators of the software by buying a legitimate license if you can afford it.</p>
```

```html
<h2>How to Use Software Cracks Safely and Effectively</h2>
<p>Using software cracks from ezycracks.com can be a great way to save money and access premium features. However, you should also be careful and responsible when using them. Here are some tips to help you use software cracks safely and effectively.</p>
<ul>
<li>Choose the right software crack for your needs. Ezycracks.com offers different types of software cracks, such as patches, keygens, serial keys, and loaders. Each of them has its own advantages and disadvantages. For example, patches modify the original software files to bypass the activation process, while keygens generate valid license keys that can be entered into the software. You should choose the type of software crack that suits your needs and preferences.</li>
<li>Disable your internet connection and antivirus program temporarily. Some software cracks might require you to disconnect from the internet and disable your antivirus program before running them. This is because some software might try to verify your license online or detect the crack as a threat. To avoid this, you should follow the instructions provided by the crack and turn off your internet connection and antivirus program temporarily. You can turn them back on after you have activated the software.</li>
<li>Don't update the software or change its settings. Once you have activated the software with a crack from ezycracks.com, you should avoid updating it or changing its settings. This is because some updates or settings might overwrite or remove the crack and make the software unusable again. You should also avoid downloading any additional content or features that might require a valid license.</li>
<li>Don't share the software or the crack with others. Sharing software or cracks that you have downloaded from ezycracks.com with others is not only illegal but also risky. You might expose yourself and others to malware, viruses, or legal actions. You should also avoid uploading or distributing the software or the crack on other websites or platforms.</li>
</ul>
<p>By following these tips, you can use software cracks from ezycracks.com safely and effectively. However, you should also remember that using software cracks is not a long-term solution. You should always respect the rights and efforts of the software developers and buy a genuine license if you can afford it.</p>
```</p> ddb901b051<br />
<br />
<br />
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Free Download Windows Mobile 7 Samsung Omnia i900 Get Ready for a Faster and Smoother Experience.md
DELETED
@@ -1,175 +0,0 @@

<h1>How to Free Download Windows Mobile 7 for Samsung Omnia i900</h1>
<p>Are you looking for a way to upgrade your Samsung Omnia i900 to a newer and better operating system? If so, you might be interested in Windows Mobile 7, the latest version of Microsoft's mobile OS that offers a sleek and intuitive user interface, enhanced performance and security, and a rich selection of apps and games. In this article, we will show you how to free download Windows Mobile 7 for Samsung Omnia i900 and how to install it on your device. We will also give you some tips and tricks on how to make the most out of your new OS.</p>
<h2>What is Windows Mobile 7?</h2>
<p>Windows Mobile 7 is the seventh generation of Microsoft's mobile operating system that was released in October 2010. It is designed to provide a seamless integration with other Microsoft products and services, such as Windows Live, Xbox Live, Zune, Bing, Office, etc. It also features a new user interface called Metro, which consists of colorful tiles that display live information and notifications. Windows Mobile 7 also supports multitouch gestures, voice commands, social networking integration, cloud computing, and more.</p>
<h2>free download windows mobile 7 samsung omnia i900</h2><br /><p><b><b>Download</b> ……… <a href="https://byltly.com/2uKvm1">https://byltly.com/2uKvm1</a></b></p><br /><br />
<h2>What is Samsung Omnia i900?</h2>
<p>Samsung Omnia i900 is a smartphone that was released in June 2008. It runs on Windows Mobile 6.1 Professional and has a 3.2-inch touchscreen display with a resolution of 240 x 400 pixels. It also has a 5-megapixel camera with autofocus and flash, a microSD card slot, Wi-Fi, Bluetooth, GPS, FM radio, and a stylus. It has a battery capacity of 1440 mAh and a weight of 122 grams.</p>
<h2>Why upgrade to Windows Mobile 7?</h2>
<p>If you are still using Windows Mobile 6.1 on your Samsung Omnia i900, you might be missing out on some of the advantages that Windows Mobile 7 can offer. Here are some of the reasons why you should consider upgrading:</p>
<ul>
<li><b>Better user interface:</b> Windows Mobile 7 has a more modern and attractive user interface than Windows Mobile 6.1. It has larger icons, smoother animations, and more customization options. You can also access your most frequently used apps and settings from the home screen.</li>
<li><b>Faster performance:</b> Windows Mobile 7 runs faster and smoother than Windows Mobile 6.1. It has improved memory management, multitasking capabilities, and battery optimization. You can also enjoy faster web browsing, email syncing, and app loading.</li>
<li><b>More security:</b> Windows Mobile 7 has more security features than Windows Mobile 6.1. It has built-in antivirus protection, remote wipe functionality, device encryption, and parental controls. You can also lock your phone with a PIN or a password.</li>
<li><b>More apps and games:</b> Windows Mobile 7 has a larger and more diverse collection of apps and games than Windows Mobile 6.1. You can download thousands of apps from the Marketplace, ranging from productivity tools, social media apps, entertainment apps, etc. You can also play high-quality games from Xbox Live.</li>
</ul>
<h2>How to free download Windows Mobile 7 for Samsung Omnia i900?</h2>
<p>If you are ready to upgrade your Samsung Omnia i900 to Windows Mobile 7, you will need to follow these steps:</p>
<p>How to get windows mobile 7 on samsung omnia i900 for free<br />
Samsung omnia i900 windows mobile 7 upgrade download link<br />
Windows mobile 7 rom for samsung omnia i900 free installation guide<br />
Samsung omnia i900 windows mobile 7 firmware update free download<br />
Free windows mobile 7 apps for samsung omnia i900<br />
Samsung omnia i900 windows mobile 7 themes free download<br />
Windows mobile 7 launcher for samsung omnia i900 free apk<br />
Samsung omnia i900 windows mobile 7 drivers free download<br />
Windows mobile 7 emulator for samsung omnia i900 free software<br />
Samsung omnia i900 windows mobile 7 games free download<br />
Windows mobile 7 custom rom for samsung omnia i900 free flash tool<br />
Samsung omnia i900 windows mobile 7 review and features<br />
Windows mobile 7 tips and tricks for samsung omnia i900 users<br />
Samsung omnia i900 windows mobile 7 comparison with other devices<br />
Windows mobile 7 backup and restore for samsung omnia i900 free tutorial<br />
Samsung omnia i900 windows mobile 7 battery life and performance<br />
Windows mobile 7 sync and transfer for samsung omnia i900 free app<br />
Samsung omnia i900 windows mobile 7 camera and video quality<br />
Windows mobile 7 security and privacy for samsung omnia i900 free settings<br />
Samsung omnia i900 windows mobile 7 keyboard and input options<br />
Windows mobile 7 widgets and shortcuts for samsung omnia i900 free customization<br />
Samsung omnia i900 windows mobile 7 browser and internet speed<br />
Windows mobile 7 email and messaging for samsung omnia i900 free setup<br />
Samsung omnia i900 windows mobile 7 contacts and calendar management<br />
Windows mobile 7 maps and navigation for samsung omnia i900 free offline mode<br />
Samsung omnia i900 windows mobile 7 music and video player features<br />
Windows mobile 7 radio and podcast for samsung omnia i900 free streaming<br />
Samsung omnia i900 windows mobile 7 social media and networking apps<br />
Windows mobile 7 news and weather for samsung omnia i900 free updates<br />
Samsung omnia i900 windows mobile 7 productivity and office tools<br />
Windows mobile 7 ebook and pdf reader for samsung omnia i900 free download<br />
Samsung omnia i900 windows mobile 7 photo and video editor apps<br />
Windows mobile 7 file manager and explorer for samsung omnia i900 free access<br />
Samsung omnia i900 windows mobile 7 calculator and converter tools<br />
Windows mobile 7 clock and alarm for samsung omnia i900 free customization<br />
Samsung omnia i900 windows mobile 7 voice recorder and memo app<br />
Windows mobile 7 flashlight and compass for samsung omnia i900 free utility<br />
Samsung omnia i900 windows mobile 7 qr code and barcode scanner app<br />
Windows mobile 7 remote control and tv guide for samsung omnia i900 free app<br />
Samsung omnia i900 windows mobile 7 fitness and health tracker apps<br />
Windows mobile 7 travel and local guide for samsung omnia i900 free app<br />
Samsung omnia i900 windows mobile 7 shopping and coupon apps<br />
Windows mobile 7 education and learning for samsung omnia i900 free app<br />
Samsung omnia i900 windows mobile 7 fun and entertainment apps<br />
Windows mobile 7 lifestyle and personalization for samsung omnia i900 free app<br />
Samsung omnia i900 windows mobile 7 troubleshooting and support forum<br />
Windows mobile 7 developer and modding for samsung omnia i900 free resources<br />
Samsung omnia i900 windows mobile 7 specifications and price list<br />
Windows mobile 7 history and evolution for samsung omnia i900 free article</p>
<h3>Step 1: Backup your data</h3>
<p>Before you start the upgrade process, you should backup your data on your phone. This includes your contacts, messages, photos, videos, music, documents, etc. You can use various methods to backup your data, such as syncing with your PC or using online services like Google Drive or Dropbox.</p>
<h3>Step 2: Download the Windows Mobile 7 ROM</h3>
<p>The next step is to download the Windows Mobile 7 ROM for Samsung Omnia i900. A ROM is a file that contains the operating system and other software for your device. You can find various sources online where you can download the ROM for free. One of them is <a href="https://forum.xda-developers.com/t/windows-phone-7-on-samsung-i900.760904/">this forum thread</a>, where you can find links to different versions of the ROM.</p>
<p>Make sure you download the ROM that matches your device model and region. Also make sure you scan the ROM for viruses before installing it.</p>
<h3>Step 3: Flash the Windows Mobile 7 ROM</h3>
<p>The final step is to flash the Windows Mobile 7 ROM on your Samsung Omnia i900. Flashing means installing the ROM on your device's memory, replacing the existing OS. To flash the Windows Mobile 7 ROM, you will need a PC and a USB cable. Here are the steps to follow:</p>
<ol>
<li>Connect your Samsung Omnia i900 to your PC using the USB cable.</li>
<li>Run the Windows Phone Image Designer tool on your PC. You can download it from <a href="https://forum.xda-developers.com/t/windows-phone-image-designer-download.2796196/">this link</a>.</li>
<li>Select "Flash a Windows Phone image onto your phone" and click next.</li>
<li>Select your device from the list and click next.</li>
<li>Browse to the folder where you downloaded the Windows Mobile 7 ROM and select it.</li>
<li>Click next and confirm that you want to flash the ROM.</li>
<li>Wait for the flashing process to complete. It may take several minutes.</li>
<li>When the flashing is done, your phone will reboot automatically.</li>
</ol>
<h3>Step 4: Enjoy your new OS</h3>
<p>Congratulations! You have successfully upgraded your Samsung Omnia i900 to Windows Mobile 7. You can now enjoy all the features and benefits of your new OS. You can customize your home screen, sync your data with Microsoft services, use voice commands and gestures, download apps from the Marketplace, and more.</p>
<h2>Tips and tricks for using Windows Mobile 7 on Samsung Omnia i900</h2>
<p>To help you get started with Windows Mobile 7 on your Samsung Omnia i900, here are some tips and tricks that you can use:</p>
<h3>Tip 1: Customize your home screen</h3>
<p>Your home screen is where you can access your most frequently used apps and settings. You can customize it by changing the tiles, colors, and themes. To do so, follow these steps:</p>
<ul>
<li>To change the tiles, tap and hold on any tile until it pops out. You can then drag it to a different position, resize it, or unpin it.</li>
<li>To change the colors, go to Settings > Theme and select your accent color and background color.</li>
<li>To change the themes, go to Settings > Theme and select one of the available themes or create your own.</li>
</ul>
<h3>Tip 2: Sync your data with Microsoft services</h3>
<p>One of the advantages of Windows Mobile 7 is that it integrates well with other Microsoft products and services, such as Outlook, OneDrive, Office, etc. You can sync your contacts, calendar, email, photos, documents, etc. with these services and access them from any device. To do so, follow these steps:</p>
<ul>
<li>Go to Settings > Email & accounts and add your Microsoft account or any other account that you want to sync with.</li>
<li>Select what data you want to sync for each account.</li>
<li>Go to Settings > Backup and turn on backup for app list + settings, text messages, photos + videos.</li>
</ul>
<h3>Tip 3: Use voice commands and gestures</h3>
<p>Windows Mobile 7 supports voice commands and gestures that allow you to control your phone without touching it. You can use voice commands to make calls, send texts, search the web, open apps, etc. You can use gestures to answer calls, mute calls, snooze alarms, etc. To do so, follow these steps:</p>
<ul>
<li>To use voice commands, press and hold the Start button until you hear a beep. Then say what you want to do. For example, "Call John", "Text Mary", "Bing pizza", etc.</li>
<li>To use gestures, go to Settings > Extras > Gestures beta and turn on gestures. Then you can use gestures like flipping your phone over to silence it or placing it face down to answer a call.</li>
</ul>
<h3>Tip 4: Download apps from the Marketplace</h3>
<p>Windows Mobile 7 has a large and diverse collection of apps that you can download from the Marketplace. You can find apps for productivity, entertainment, education, health, finance, and more. Here are some of the best apps for Windows Mobile 7 that you should try:</p>
<table>
<tr>
<th>App</th>
<th>Description</th>
</tr>
<tr>
<td>WhatsApp</td>
<td>A popular messaging app that lets you chat, call, and share media with your contacts for free.</td>
</tr>
<tr>
<td>Skype</td>
<td>A video calling app that lets you connect with your friends and family across the world.</td>
</tr>
<tr>
<td>Facebook</td>
<td>The official app for the social media giant that lets you stay in touch with your friends, post updates, check news, and more.</td>
</tr>
<tr>
<td>Instagram</td>
<td>A photo-sharing app that lets you capture and edit your moments, follow your favorite celebrities, and discover new trends.</td>
</tr>
<tr>
<td>Twitter</td>
<td>A micro-blogging app that lets you follow the latest news, opinions, and trends from around the world.</td>
</tr>
<tr>
<td>Viber</td>
<td>A messaging and calling app that lets you communicate with your contacts for free, with features like group chats, stickers, and voice messages.</td>
</tr>
<tr>
<td>Bing</td>
<td>A search engine app that lets you find what you need on the web, with features like voice search, image search, maps, and more.</td>
</tr>
<tr>
<td>Zune</td>
<td>A music and video app that lets you enjoy your favorite tunes and shows, with features like playlists, podcasts, radio, and more.</td>
</tr>
<tr>
<td>Xbox Live</td>
<td>A gaming app that lets you play high-quality games on your phone, with features like achievements, leaderboards, multiplayer, and more.</td>
</tr>
<tr>
<td>Office Mobile</td>
<td>A productivity app that lets you create and edit documents, spreadsheets, and presentations on your phone.</td>
</tr>
</table>
<h3>Tip 5: Update your phone regularly</h3>
<p>Windows Mobile 7 is no longer supported by Microsoft, which means it won't receive any new features or security updates. However, you can still check for any available updates that you might have missed before. To do so, follow these steps:</p>
<ul>
<li>Go to Settings > Phone update and tap Check for updates.</li>
<li>If there are any updates available, tap Download and install.</li>
<li>Wait for the update to download and install. Your phone may restart several times during the process.</li>
</ul>
<h2>Conclusion</h2>
<p>Windows Mobile 7 is a great operating system that can give your Samsung Omnia i900 a new lease of life. It has a beautiful and user-friendly interface, a fast and smooth performance, a high level of security, and a wide range of apps and games. In this article, we showed you how to free download Windows Mobile 7 for Samsung Omnia i900 and how to install it on your device. We also gave you some tips and tricks on how to make the most out of your new OS. We hope you found this article helpful and informative. If you have any questions or feedback, feel free to leave a comment below.</p>
<h2>FAQs</h2>
<p>Here are some frequently asked questions about Windows Mobile 7 and Samsung Omnia i900:</p>
<ol>
<li><b>Will Windows Mobile 7 work on any Samsung Omnia model?</b><br>No, Windows Mobile 7 will only work on Samsung Omnia i900. Other models, such as Samsung Omnia i910 or Samsung Omnia II, are not compatible with Windows Mobile 7.</li>
<li><b>Will I lose any data or settings when I upgrade to Windows Mobile 7?</b><br>Yes, upgrading to Windows Mobile 7 will erase all your data and settings on your Samsung Omnia i900. That's why it's important to backup your data before you start the upgrade process.</li>
<li><b>Can I downgrade back to Windows Mobile 6.1 if I don't like Windows Mobile 7?</b><br>Yes, you can downgrade back to Windows Mobile 6.1 if you want to. You will need to flash the original Windows Mobile 6.1 ROM on your Samsung Omnia i900 using the same method as flashing the Windows Mobile 7 ROM.</li>
<li><b>Can I use Google services on Windows Mobile 7?</b><br>Yes, you can use Google services on Windows Mobile 7, such as Gmail, Google Maps, Google Drive, etc. You will need to download the Google apps from the Marketplace or use the web browser to access them.</li>
<li><b>Can I use dual SIM cards on Samsung Omnia i900?</b><br>No, Samsung Omnia i900 does not support dual SIM cards. It only has one SIM card slot.</li>
</ol>
</p> 0a6ba089eb<br />
<br />
<br />
spaces/1acneusushi/gradio-2dmoleculeeditor/data/Free LINK Winzip Full Version Download.md
DELETED
@@ -1,19 +0,0 @@
<br />
<h1>How to Get Free WinZip Full Version Download for Windows 10</h1>
<p>WinZip is one of the most popular and trusted software for compressing and decompressing files. It can help you save disk space, reduce file transfer time, and protect your files with encryption and password. WinZip supports various formats, such as ZIP, RAR, 7Z, TAR, GZIP, and more. However, WinZip is not a free software, and you need to pay a license fee to use it without any limitations.</p>
<p>That's why some people look for free WinZip full version download for Windows 10 online. They want to enjoy the benefits of WinZip without spending any money. However, this is not a good idea. Downloading WinZip from unofficial sources can expose you to various risks and problems. Here are some of them:</p>
<h2>free winzip full version download</h2><br /><p><b><b>Download File</b> ✓✓✓ <a href="https://byltly.com/2uKA0F">https://byltly.com/2uKA0F</a></b></p><br /><br />
<ul>
<li><b>You may download malware or viruses.</b> Some websites that offer free WinZip full version download for Windows 10 may contain malicious software that can harm your computer or steal your personal information. You may end up infecting your system with spyware, ransomware, trojans, or other threats.</li>
<li><b>You may violate the law.</b> Using pirated software is illegal and can result in legal consequences. You may face fines or lawsuits if you are caught using free WinZip full version download for Windows 10. Moreover, you may also damage the reputation of WinZip and its developers who work hard to provide quality software.</li>
<li><b>You may miss out on updates and support.</b> When you use free WinZip full version download for Windows 10, you will not be able to access the official updates and support from WinZip. This means you will not be able to enjoy the latest features and improvements of the software. You will also not be able to get help from the customer service if you encounter any issues or errors.</li>
</ul>
<p>Therefore, we do not recommend using free WinZip full version download for Windows 10. Instead, we suggest you to try some of the legitimate ways to get WinZip for free or at a lower cost. Here are some of them:</p>
<ul>
<li><b>Download the trial version.</b> WinZip offers a 21-day free trial that you can download from its official website. You can use all the features and functions of the software without any restrictions during the trial period. This is a great way to test the software and see if it meets your needs.</li>
<li><b>Use the online version.</b> WinZip also has an online version called ZipShare that you can access from any browser. You can upload, zip, unzip, encrypt, and share files online without installing any software. The online version is free for basic users, but you can upgrade to a premium plan for more features and storage.</li>
<li><b>Buy the discounted version.</b> Sometimes, WinZip offers discounts and deals on its website or through its partners. You can check their website regularly or sign up for their newsletter to get notified of any promotions. You can also look for coupons or vouchers from third-party websites that can help you save some money on your purchase.</li>
</ul>
<p>We hope this article has helped you understand why you should avoid using free WinZip full version download for Windows 10 and what are some of the alternatives you can try. Remember that using pirated software is not only unethical but also dangerous. It is better to use legal and safe ways to get WinZip and enjoy its advantages.</p> ddb901b051<br />
<br />
<br />
|
spaces/1acneusushi/gradio-2dmoleculeeditor/data/GTA 5 Key How to Access the Most Epic Game Ever.md
DELETED
@@ -1,14 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>How to Get a GTA 5 Key for Free</h1>
|
3 |
-
<p>GTA 5 is one of the most popular and successful video games of all time. It offers an immersive open-world experience, where you can explore the city of Los Santos and its surrounding areas, engage in various missions and activities, and customize your character and vehicles. GTA 5 also has an online mode, where you can play with other players from around the world, join crews, participate in heists, races, deathmatches, and more.</p>
|
4 |
-
<p>However, GTA 5 is not a cheap game. It usually costs around $60 on various platforms, such as Steam, Epic Games Store, PlayStation Store, and Xbox Store. If you want to play GTA 5 without spending a dime, you might be wondering if there is a way to get a GTA 5 key for free.</p>
|
5 |
-
<h2>crack gta 5 key</h2><br /><p><b><b>DOWNLOAD</b> — <a href="https://byltly.com/2uKzvP">https://byltly.com/2uKzvP</a></b></p><br /><br />
|
6 |
-
<p>The answer is yes, but it is not easy or guaranteed. There are some methods that might work for you, but they also come with some risks and drawbacks. Here are some of the ways you can try to get a GTA 5 key for free:</p>
|
7 |
-
<ul>
|
8 |
-
<li><b>Enter giveaways and contests.</b> There are many websites, blogs, YouTube channels, and social media pages that host giveaways and contests for GTA 5 keys. You can enter these by following their rules and requirements, such as subscribing, liking, commenting, sharing, etc. However, you should be careful about which ones you enter, as some of them might be scams or phishing attempts. You should also be aware that the chances of winning are very low, as there are thousands of other participants.</li>
|
9 |
-
<li><b>Use reward apps and websites.</b> There are some apps and websites that reward you with points or credits for completing tasks, such as watching videos, taking surveys, downloading apps, etc. You can then redeem these points or credits for gift cards or codes that you can use to buy GTA 5 keys. Some examples of these apps and websites are Swagbucks, Mistplay, AppNana, FeaturePoints, etc. However, you should know that these tasks can be tedious and time-consuming, and the rewards are often very low. You might need to spend hours or days to earn enough points or credits for a GTA 5 key.</li>
|
10 |
-
<li><b>Use key generators or cracks.</b> There are some programs or websites that claim to generate or crack GTA 5 keys for free. You might be tempted to try these out, but you should avoid them at all costs. These programs or websites are usually malware or viruses that can harm your device or steal your personal information. They can also get you banned from GTA 5 online mode or other online services. Moreover, these programs or websites rarely work as advertised, and they often provide fake or invalid keys.</li>
|
11 |
-
</ul>
|
12 |
-
<p>In conclusion, getting a GTA 5 key for free is possible but not easy or safe. You might end up wasting your time, money, or security by trying some of the methods mentioned above. The best way to enjoy GTA 5 is to buy it from a legitimate source when it is on sale or discounted. This way, you can support the developers and publishers of the game and have a smooth and hassle-free gaming experience.</p> ddb901b051<br />
|
13 |
-
<br />
|
14 |
-
<br />
spaces/1gistliPinn/ChatGPT4/Examples/Commodore 64 Roms Pack !!LINK!! Download.md
DELETED
@@ -1,6 +0,0 @@
<h2>Commodore 64 roms pack download</h2><br /><p><b><b>Download Zip</b> » <a href="https://imgfil.com/2uxXO7">https://imgfil.com/2uxXO7</a></b></p><br /><br />

We offer fast servers so you can Download N64 ROMs and start playing ... I've been using the 188 rom pack from EWJ for quite awhile. ... COM is a C64 site dedicated to just about everything that is connected to the Commodore 64 (C64). 1fdad05405<br />
<br />
<br />
<p></p>
spaces/1gistliPinn/ChatGPT4/Examples/Download Script Frost Dragon Okolnir Elfbot WORK.md
DELETED
@@ -1,6 +0,0 @@
<h2>download script frost dragon okolnir elfbot</h2><br /><p><b><b>DOWNLOAD</b> ✶✶✶ <a href="https://imgfil.com/2uy1su">https://imgfil.com/2uy1su</a></b></p><br /><br />
<br />
Programming can elf scripts be posted there ? :). Reply ... We Should not support bots, or download & run crap. ... Try Okolnir frost dragons. 1fdad05405<br />
<br />
<br />
<p></p>
spaces/1gistliPinn/ChatGPT4/Examples/Free Download Myob Accounting Versi 17 Full 32 Fixed.md
DELETED
@@ -1,6 +0,0 @@
<h2>Free Download Myob Accounting Versi 17 Full 32</h2><br /><p><b><b>Download</b> ::: <a href="https://imgfil.com/2uy1JS">https://imgfil.com/2uy1JS</a></b></p><br /><br />
<br />
3cee63e6c2<br />
<br />
<br />
<p></p>
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Attack on Titan 2 Final Battle - The Ultimate Challenge for Fans of the Anime.md
DELETED
@@ -1,129 +0,0 @@

<h1>Attack on Titan Game: A Guide for Fans and Newcomers</h1>
<p>If you are a fan of the hit anime and manga series <em>Attack on Titan</em>, or if you are curious about what it is all about, you might want to check out <em>Attack on Titan Game</em>, a thrilling action game based on the popular franchise. In this article, we will give you a comprehensive guide on what <em>Attack on Titan</em> is, what <em>Attack on Titan Game</em> is, why you should play it, and where you can get it. Whether you are a seasoned fan or a newcomer, this article will help you enjoy <em>Attack on Titan Game</em> more.</p>
<h2>What is Attack on Titan?</h2>
<p><em>Attack on Titan</em> is a Japanese anime and manga series created by Hajime Isayama. It is set in a world where humanity lives inside walled cities to protect themselves from giant humanoid creatures called Titans, who devour humans without reason. The story follows Eren Yeager, a young boy who vows to exterminate all Titans after his mother is killed by one. He joins the Survey Corps, an elite military unit that fights Titans outside the walls, along with his friends Mikasa Ackerman and Armin Arlert.</p>
<h2>attack on titan game</h2><br /><p><b><b>Download Zip</b> ► <a href="https://urlin.us/2uSZB8">https://urlin.us/2uSZB8</a></b></p><br /><br />
<h3>The story and the characters of the anime and manga</h3>
<p>The anime and manga series of <em>Attack on Titan</em> have been praised for their gripping story, complex characters, and stunning visuals. The series has four seasons of anime adaptation, with the final season currently airing. The manga has 34 volumes as of June 2021, with the final chapter published in April 2021. The series has won several awards, such as the Kodansha Manga Award, the Harvey Award, and the Micheluzzi Award.</p>
<p>The series has a large cast of characters, each with their own personality, backstory, and motivation. Some of the main characters are:</p>
<ul>
<li>Eren Yeager: The protagonist of the series, who has the ability to transform into a Titan. He is determined to wipe out all Titans and uncover the secrets behind their origin.</li>
<li>Mikasa Ackerman: Eren's childhood friend and adoptive sister, who is one of the strongest soldiers in the Survey Corps. She is loyal to Eren and will do anything to protect him.</li>
<li>Armin Arlert: Eren's childhood friend and a genius strategist, who also has the ability to transform into a Titan. He is often insecure about his physical strength, but he compensates with his intelligence and courage.</li>
<li>Levi Ackerman: The captain of the Survey Corps' Special Operations Squad, who is widely regarded as humanity's strongest soldier. He is cold, ruthless, and disciplined, but he also cares deeply for his comrades.</li>
<li>Hange Zoe: The commander of the Survey Corps, who is obsessed with studying Titans and experimenting on them. She is eccentric, enthusiastic, and passionate about her work.</li>
</ul>
<h3>The themes and the messages of the series</h3>
<p><em>Attack on Titan</em> explores various themes and messages, such as freedom, oppression, war, morality, identity, loyalty, betrayal, revenge, hope, despair, and more. The series challenges its characters and its audience to question their beliefs, values, and actions in a cruel and complex world. The series also shows how humans can overcome their fears and limitations by fighting for their ideals and dreams.</p>
<h2>What is Attack on Titan Game?</h2>
<p><em>Attack on Titan Game</em> is a video game based on the anime and manga series of the same name. It is developed by Omega Force, a subsidiary of Koei Tecmo, and published by Koei Tecmo in Japan and by Tecmo Koei America in North America and Europe. The game was released for PlayStation 3, PlayStation 4, PlayStation Vita, Xbox One, and Microsoft Windows in 2016, and for Nintendo Switch in 2018.</p>
<h3>The gameplay and the features of the game</h3>
<p>The game is an action game that lets you play as various characters from the series, such as Eren, Mikasa, Levi, Hange, and more. You can also create your own custom character and join the Survey Corps. The game follows the story of the anime and manga from the beginning until the end of season one, with some original scenarios and characters added. You can also play online co-op missions with up to four players.</p>
<p>The game's main feature is the omni-directional mobility gear (ODM), which allows you to swing around the environment and attack Titans with your blades. You can target different parts of a Titan's body, such as the arms, legs, eyes, or nape, and sever them to weaken or kill them. You can also use items such as gas canisters, blades, guns, bombs, and traps to aid you in combat. You have to manage your resources carefully, as running out of gas or blades can leave you vulnerable.</p>
<p>The game also has a town mode, where you can interact with other characters, upgrade your equipment, buy items, and access side missions. You can also view your stats, achievements, gallery, and encyclopedia in this mode.</p>
<h3>The differences and the similarities between the game and the anime/manga</h3>
<p>The game is faithful to the anime and manga in terms of the story, the characters, the visuals, and the sound. The game uses cel-shaded graphics to recreate the style of the anime, and features voice acting from the original cast. The game also uses music from the anime's soundtrack, composed by Hiroyuki Sawano.</p>
<p>The game also adds some new elements that are not present in the anime or manga. For example, the game introduces some original characters that are part of your squad, such as Ian Dietrich, Rico Brzenska, Mitabi Jarnach, and Gelgar. The game also has some original scenarios that expand on the events of the anime or manga, such as a mission where you have to rescue civilians from a Titan-infested town.</p>
<p>Attack on Titan / A.O.T. Wings of Freedom on Steam<br />
Attack on Titan 2 - A.O.T.2 - Demo Download<br />
List of Attack Mode Missions (Attack on Titan Game)<br />
Attack on Titan 2: Final Battle Upgrade Pack<br />
Attack on Titan Tribute Game by Feng<br />
Attack on Titan Tactics - Mobile Strategy Game<br />
Attack on Titan: Humanity in Chains for Nintendo 3DS<br />
Attack on Titan VR by Kosma - Oculus Quest<br />
Attack on Titan: Assault - RPG Runner Game<br />
Attack on Titan: The Last Stand - Board Game<br />
Attack on Titan: Escape from Certain Death for Nintendo Switch<br />
Attack on Titan: No Regrets - Visual Novel Game<br />
Attack on Titan: Lost Girls - Interactive Video Game<br />
Attack on Titan: Before the Fall - Online Game<br />
Attack on Titan: Junior High - Mini Game Collection<br />
Attack on Titan: The Anime Guide - Official Game Book<br />
Attack on Titan: The Harsh Mistress of the City - Text Adventure Game<br />
Attack on Titan: Chronicle - Movie Tie-in Game<br />
Attack on Titan: Wings of Counterattack Online - Browser Game<br />
Attack on Titan: Roar to Freedom - Mobile Simulation Game<br />
Attack on Titan: End of the World - Live Action Game<br />
Attack on Titan: Guren no Yumiya - Arcade Game<br />
Attack on Titan: Shichi Kara no Dasshutsu - Escape Room Game<br />
Attack on Titan: Team Battle - Multiplayer Online Game<br />
Attack on Titan: Brave Order - Mobile RPG Game<br />
Attack on Titan: The Final Season - Anime Streaming Game<br />
Attack on Titan: Beyond the Wall - Mobile Card Game<br />
Attack on Titan: Shadow of Freedom - Fan-made Game<br />
Attack on Titan: Birth of Levi - Spin-off Game<br />
Attack on Titan: Wall Sina, Goodbye - Side Story Game<br />
Attack on Titan: Clash of Titans - Mobile Action Game<br />
Attack on Titan: Dawn of Humanity - VR Experience Game<br />
Attack on Titan: Crimson Bow and Arrow - Movie Quiz Game<br />
Attack on Titan: The Real - Universal Studios Japan Game<br />
Attack on Titan: Spoof on Titan - Parody Game<br />
Attack on Titan: Colossal Edition - Manga Box Set Game<br />
Attack on Titan: Original Soundtrack - Music Album Game<br />
Attack on Titan: Garrison Regiment Training Camp - VR Training Game<br />
Attack on Titan: Survey Corps Expedition - VR Exploration Game<br />
Attack on Titan: Military Police Brigade Investigation - VR Mystery Game<br />
Attack on Titan: Levi vs Beast Titan - VR Battle Game<br />
Attack on Titan: Eren's Basement Key - VR Puzzle Game<br />
Attack on Titan: Mikasa's Scarf - VR Romance Game<br />
Attack on Titan: Armin's Colossal Plan - VR Strategy Game<br />
Attack on Titan: Erwin's Sacrifice - VR Drama Game<br />
Attack on Titan: Hange's Experiments - VR Science Game<br />
Attack on Titan: Sasha's Potato Snack - VR Cooking Game</p>
<p>The game also has some differences from the anime or manga in terms of the gameplay. For example, the game allows you to play as characters that are not playable in the anime or manga, such as Hange or Erwin. The game also gives you more freedom in how you approach each mission, as you can choose your own route and strategy. The game also has some features that are not realistic or consistent with the anime or manga's logic, such as being able to use guns or bombs against Titans.</p>
<h2>Why should you play Attack on Titan Game?</h2>
74 |
-
<p>The game also differs from the anime and manga in terms of gameplay. For example, it lets you fight as characters whose battles the anime and manga rarely show in detail, such as Hange or Erwin. It gives you more freedom in how you approach each mission, as you can choose your own route and strategy. It also has some features that are not consistent with the logic of the anime or manga, such as being able to use guns or bombs against Titans.</p>
<h2>Why should you play Attack on Titan Game?</h2>
<p>If you are a fan of <em>Attack on Titan</em>, playing <em>Attack on Titan Game</em> is a great way to experience the story and the world of the series in a new and immersive way. You can relive the epic moments of the anime and manga, such as the fall of Shiganshina, the battle of Trost, and the female Titan chase. You can also explore the details and the secrets of the series, such as the history of the walls, the origin of the Titans, and the identity of the enemy.</p>
<p>If you are new to <em>Attack on Titan</em>, playing the game is a great way to get introduced to the series and its characters. You can learn about the plot and the setting, as well as the personalities and relationships of the characters, while enjoying the action and thrill of fighting Titans and the drama and emotion of the story.</p>
<h3>The benefits and the challenges of playing the game</h3>
<p>Playing <em>Attack on Titan Game</em> has many benefits, such as:</p>
<ul>
<li>It improves your reflexes and coordination, as you have to maneuver around the environment and attack Titans with precision and timing.</li>
<li>It stimulates your creativity and problem-solving skills, as you have to plan your strategy and use your resources wisely.</li>
<li>It enhances your knowledge and appreciation of the series, as you discover new facts and insights about the story and the characters.</li>
<li>It entertains you, as you have fun and feel satisfied with your achievements.</li>
</ul>
<p>Playing the game also has some challenges, such as:</p>
<ul>
<li>It can be frustrating and stressful, as you face difficult and dangerous situations that can result in failure or death.</li>
<li>It can be addictive and time-consuming, as you get hooked on playing more missions and unlocking more content.</li>
<li>It can be expensive and demanding, as you may need to buy or upgrade your device or platform to play the game smoothly.</li>
<li>It can be isolating and distracting, as you lose touch with reality or neglect other aspects of your life.</li>
</ul>
<h3>The tips and the tricks for enjoying the game more</h3>
<p>To enjoy <em>Attack on Titan Game</em> more, here are some tips and tricks that you can follow:</p>
<ul>
<li>Play with friends or other players online, as you can cooperate, communicate, and compete with each other.</li>
<li>Play with headphones or speakers, as you can immerse yourself in the sound effects and the music of the game.</li>
<li>Play in moderation, as you can avoid getting bored, tired, or burned out from playing too much.</li>
<li>Play with curiosity and openness, as you can explore different options, outcomes, and possibilities in the game.</li>
</ul>
<h2>Where can you get Attack on Titan Game?</h2>
<p><em>Attack on Titan Game</em> is available for various platforms and devices: PlayStation 3, PlayStation 4, PlayStation Vita, Xbox One, Microsoft Windows, and Nintendo Switch. You can buy or download the game from online stores, physical stores, or official websites. The table below shows some examples of where you can get the game, along with prices and discounts at the time of writing:</p>
<table>
<tr><th>Platform/Device</th><th>Source</th><th>Price</th><th>Discount</th></tr>
<tr><td>PlayStation 4</td><td><a href="">Amazon.com</a></td><td>$29.99</td><td>$10.00 (25% off)</td></tr>
<tr><td>Xbox One</td><td><a href="">Microsoft Store</a></td><td>$59.99</td><td>$0.00 (0% off)</td></tr>
<tr><td>Nintendo Switch</td><td><a href="">Nintendo eShop</a></td><td>$59.99</td><td>$0.00 (0% off)</td></tr>
<tr><td>Microsoft Windows</td><td><a href="">Steam</a></td><td>$59.99</td><td>$17.99 (70% off)</td></tr>
<tr><td>PlayStation Vita</td><td><a href="">PlayStation Store</a></td><td></td><td></td></tr>
</table>
<h2>Conclusion</h2>
<p><em>Attack on Titan Game</em> is a game that every fan of <em>Attack on Titan</em> should play and every newcomer should try. It lets you experience the story and the world of the series in a new and immersive way, and it challenges you to fight Titans and survive in a cruel and complex world. It entertains as often as it frustrates, it comes with real benefits and real challenges, and it is available for a range of platforms and prices.</p>
<p>If you are interested in playing <em>Attack on Titan Game</em>, you can get it from the sources listed above or from any other retailer you prefer. You can also check the game's official website and social media accounts for more information and updates, or watch the trailer and read reviews to get a better idea of what it is like.</p>
<p>Whether you are a fan or a newcomer, we hope that this article has helped you learn more about <em>Attack on Titan Game</em>, and that you will enjoy playing it. Thank you for reading, and have fun!</p>
<h3>Frequently Asked Questions</h3>
<p>Here are some frequently asked questions about <em>Attack on Titan Game</em>, along with their answers:</p>
<ol>
<li><strong>Is <em>Attack on Titan Game</em> suitable for children?</strong></li>
<p>The game is rated M for Mature by the ESRB, 18 by PEGI, and Z by CERO. It contains violence, blood, gore, and language that may not be appropriate for children, and it deals with dark and mature themes that some players may find disturbing or upsetting. Parents or guardians should supervise children who want to play it, or avoid the game altogether if they are not comfortable with its content.</p>
<li><strong>How long does it take to finish <em>Attack on Titan Game</em>?</strong></li>
<p>The length depends on how you play and how much content you want to explore. According to HowLongToBeat.com, the main story takes about 10 hours on average, while completing all the extra content takes about 25 hours. Your time may vary depending on your skill level, difficulty setting, pace, and choices.</p>
<li><strong>Does <em>Attack on Titan Game</em> have a multiplayer mode?</strong></li>
<p>The game has an online co-op mode, where you can play with up to three other players in various missions. You can join a random lobby, or create your own lobby and invite your friends, and you can chat with other players using voice or text messages. The game does not have a local co-op mode or a competitive mode.</p>
<li><strong>Does <em>Attack on Titan Game</em> have DLCs or updates?</strong></li>
<p>The game has several DLC packs that you can purchase separately or as part of a season pass, including additional costumes, weapons, scenarios, characters, and modes. The game also receives free updates that fix bugs, improve performance, and add new features.</p>
<li><strong>Does <em>Attack on Titan Game</em> have any sequels or spin-offs?</strong></li>
<p>The game has a sequel called <em>Attack on Titan 2</em>, released in 2018, which covers later seasons of the anime along with some original content and improves the graphics, gameplay, and customization options. The sequel received an expanded edition called <em>Attack on Titan 2: Final Battle</em>, released in 2019, which adds more content from season three of the anime, as well as new modes and features.</p>
</ol>
spaces/2023Liu2023/bingo/src/pages/api/blob.ts
DELETED
@@ -1,40 +0,0 @@
'use server'

import { NextApiRequest, NextApiResponse } from 'next'
import { Readable } from 'node:stream'
import { fetch } from '@/lib/isomorphic'

const API_DOMAIN = 'https://www.bing.com'

export default async function handler(req: NextApiRequest, res: NextApiResponse) {
  try {
    const { bcid } = req.query

    const { headers, body } = await fetch(`${API_DOMAIN}/images/blob?bcid=${bcid}`,
      {
        method: 'GET',
        headers: {
          "sec-ch-ua": "\"Not/A)Brand\";v=\"99\", \"Google Chrome\";v=\"115\", \"Chromium\";v=\"115\"",
          "sec-ch-ua-mobile": "?0",
          "sec-ch-ua-platform": "\"Windows\"",
          "Referrer-Policy": "origin-when-cross-origin",
        },
      },
    )

    res.writeHead(200, {
      'Content-Length': headers.get('content-length')!,
      'Content-Type': headers.get('content-type')!,
    })
    // @ts-ignore
    return Readable.fromWeb(body!).pipe(res)
  } catch (e) {
    console.log('Error', e)
    return res.json({
      result: {
        value: 'UploadFailed',
        message: `${e}`
      }
    })
  }
}
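The route above simply proxies Bing's image-blob endpoint and streams the upstream body through unchanged. A minimal sketch of exercising such a route from a client, assuming the space runs on a local Next.js dev server; the host, port, and `bcid` value are placeholders, not taken from the repository:

```python
# Hypothetical client for the /api/blob proxy route; host and bcid are placeholders.
import requests

resp = requests.get(
    "http://localhost:3000/api/blob",      # assumed local dev server
    params={"bcid": "example-blob-id"},    # placeholder blob id
    timeout=30,
)
resp.raise_for_status()
with open("image.jpg", "wb") as f:
    f.write(resp.content)  # the route forwards the upstream bytes as-is
```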
spaces/AIGC-Audio/AudioGPT/NeuralSeq/tasks/tts/ps_adv.py
DELETED
@@ -1,372 +0,0 @@
import os
import torch
import torch.nn.functional as F
import torch.nn as nn
import numpy as np

from modules.portaspeech.portaspeech import PortaSpeech
from modules.syntaspeech.multi_window_disc import Discriminator
from tasks.tts.fs2 import FastSpeech2Task
from utils.hparams import hparams
from utils.tts_utils import get_focus_rate, get_phone_coverage_rate, get_diagonal_focus_rate, mel2token_to_dur
from utils import num_params, tensors_to_scalars
from utils.pitch_utils import denorm_f0, norm_f0
from data_gen.tts.data_gen_utils import get_pitch
from utils.dtw import dtw as DTW

from utils.plot import spec_to_figure
from utils.text.text_encoder import build_token_encoder


class PortaSpeechAdvTask(FastSpeech2Task):
    def __init__(self):
        super().__init__()
        data_dir = hparams['binary_data_dir']
        self.word_encoder = build_token_encoder(f'{data_dir}/word_set.json')
        self.build_disc_model()
        self.mse_loss_fn = torch.nn.MSELoss()

    def build_tts_model(self):
        ph_dict_size = len(self.token_encoder)
        word_dict_size = len(self.word_encoder)
        self.model = PortaSpeech(ph_dict_size, word_dict_size, hparams)

        # Parameter groups kept separate so gradient clipping below can treat
        # the duration predictor / BERT encoder differently from the rest.
        self.gen_params = [p for p in self.model.parameters() if p.requires_grad]
        self.dp_params = [p for k, p in self.model.named_parameters() if (('dur_predictor' in k) and p.requires_grad)]
        self.gen_params_except_dp = [p for k, p in self.model.named_parameters() if (('dur_predictor' not in k) and p.requires_grad)]
        self.bert_params = [p for k, p in self.model.named_parameters() if (('bert' in k) and p.requires_grad)]
        self.gen_params_except_bert_and_dp = [p for k, p in self.model.named_parameters() if ('dur_predictor' not in k) and ('bert' not in k) and p.requires_grad]

        self.use_bert = len(self.bert_params) > 0

    def build_disc_model(self):
        disc_win_num = hparams['disc_win_num']
        h = hparams['mel_disc_hidden_size']
        self.mel_disc = Discriminator(
            time_lengths=[32, 64, 128][:disc_win_num],
            freq_length=80, hidden_size=h, kernel=(3, 3)
        )
        self.disc_params = list(self.mel_disc.parameters())

    def on_train_start(self):
        super().on_train_start()
        for n, m in self.model.named_children():
            num_params(m, model_name=n)
        if hasattr(self.model, 'fvae'):
            for n, m in self.model.fvae.named_children():
                num_params(m, model_name=f'fvae.{n}')

    def _training_step(self, sample, batch_idx, optimizer_idx):
        loss_output = {}
        loss_weights = {}
        disc_start = self.global_step >= hparams["disc_start_steps"] and hparams['lambda_mel_adv'] > 0
        if optimizer_idx == 0:
            #######################
            #      Generator      #
            #######################
            loss_output, model_out = self.run_model(sample, infer=False)
            self.model_out_gt = self.model_out = \
                {k: v.detach() for k, v in model_out.items() if isinstance(v, torch.Tensor)}
            if disc_start:
                mel_p = model_out['mel_out']
                if hasattr(self.model, 'out2mel'):
                    mel_p = self.model.out2mel(mel_p)
                o_ = self.mel_disc(mel_p)
                p_, pc_ = o_['y'], o_['y_c']
                # LSGAN generator losses: push D's scores on fake mels towards 1.
                if p_ is not None:
                    loss_output['a'] = self.mse_loss_fn(p_, p_.new_ones(p_.size()))
                    loss_weights['a'] = hparams['lambda_mel_adv']
                if pc_ is not None:
                    loss_output['ac'] = self.mse_loss_fn(pc_, pc_.new_ones(pc_.size()))
                    loss_weights['ac'] = hparams['lambda_mel_adv']
        else:
            #######################
            #    Discriminator    #
            #######################
            if disc_start and self.global_step % hparams['disc_interval'] == 0:
                model_out = self.model_out_gt
                mel_g = sample['mels']
                mel_p = model_out['mel_out']
                o = self.mel_disc(mel_g)
                p, pc = o['y'], o['y_c']
                o_ = self.mel_disc(mel_p)
                p_, pc_ = o_['y'], o_['y_c']
                # LSGAN discriminator losses: real mels -> 1, generated mels -> 0.
                if p_ is not None:
                    loss_output["r"] = self.mse_loss_fn(p, p.new_ones(p.size()))
                    loss_output["f"] = self.mse_loss_fn(p_, p_.new_zeros(p_.size()))
                if pc_ is not None:
                    loss_output["rc"] = self.mse_loss_fn(pc, pc.new_ones(pc.size()))
                    loss_output["fc"] = self.mse_loss_fn(pc_, pc_.new_zeros(pc_.size()))
        total_loss = sum([loss_weights.get(k, 1) * v for k, v in loss_output.items() if isinstance(v, torch.Tensor) and v.requires_grad])
        loss_output['batch_size'] = sample['txt_tokens'].size()[0]
        return total_loss, loss_output

    def run_model(self, sample, infer=False, *args, **kwargs):
        txt_tokens = sample['txt_tokens']
        word_tokens = sample['word_tokens']
        spk_embed = sample.get('spk_embed')
        spk_id = sample.get('spk_ids')
        if not infer:
            output = self.model(txt_tokens, word_tokens,
                                ph2word=sample['ph2word'],
                                mel2word=sample['mel2word'],
                                mel2ph=sample['mel2ph'],
                                word_len=sample['word_lengths'].max(),
                                tgt_mels=sample['mels'],
                                pitch=sample.get('pitch'),
                                spk_embed=spk_embed,
                                spk_id=spk_id,
                                infer=False,
                                global_step=self.global_step,
                                graph_lst=sample['graph_lst'],
                                etypes_lst=sample['etypes_lst'],
                                bert_feats=sample.get("bert_feats"),
                                cl_feats=sample.get("cl_feats"))
            losses = {}
            losses['kl_v'] = output['kl'].detach()
            # KL term: clamp to kl_min, warm up linearly over kl_start_steps, then weight.
            losses_kl = output['kl']
            losses_kl = torch.clamp(losses_kl, min=hparams['kl_min'])
            losses_kl = min(self.global_step / hparams['kl_start_steps'], 1) * losses_kl
            losses_kl = losses_kl * hparams['lambda_kl']
            losses['kl'] = losses_kl

            self.add_mel_loss(output['mel_out'], sample['mels'], losses)
            if hparams['dur_level'] == 'word':
                self.add_dur_loss(
                    output['dur'], sample['mel2word'], sample['word_lengths'], sample['txt_tokens'], losses)
                self.get_attn_stats(output['attn'], sample, losses)
            else:
                super(PortaSpeechAdvTask, self).add_dur_loss(output['dur'], sample['mel2ph'], sample['txt_tokens'], losses)
            return losses, output
        else:
            use_gt_dur = kwargs.get('infer_use_gt_dur', hparams['use_gt_dur'])
            output = self.model(
                txt_tokens, word_tokens,
                ph2word=sample['ph2word'],
                word_len=sample['word_lengths'].max(),
                pitch=sample.get('pitch'),
                mel2ph=sample['mel2ph'] if use_gt_dur else None,
                mel2word=sample['mel2word'] if use_gt_dur else None,
                tgt_mels=sample['mels'],
                infer=True,
                spk_embed=spk_embed,
                spk_id=spk_id,
                graph_lst=sample['graph_lst'],
                etypes_lst=sample['etypes_lst'],
                bert_feats=sample.get("bert_feats"),
                cl_feats=sample.get("cl_feats"))
            return output

    def add_dur_loss(self, dur_pred, mel2token, word_len, txt_tokens, losses=None):
        T = word_len.max()
        dur_gt = mel2token_to_dur(mel2token, T).float()
        nonpadding = (torch.arange(T).to(dur_pred.device)[None, :] < word_len[:, None]).float()
        dur_pred = dur_pred * nonpadding
        dur_gt = dur_gt * nonpadding
        wdur = F.l1_loss((dur_pred + 1).log(), (dur_gt + 1).log(), reduction='none')
        wdur = (wdur * nonpadding).sum() / nonpadding.sum()

        if hparams['lambda_word_dur'] > 0:
            losses['wdur'] = wdur * hparams['lambda_word_dur']
        if hparams['lambda_sent_dur'] > 0:
            sent_dur_p = dur_pred.sum(-1)
            sent_dur_g = dur_gt.sum(-1)
            sdur_loss = F.l1_loss(sent_dur_p, sent_dur_g, reduction='mean')
            losses['sdur'] = sdur_loss.mean() * hparams['lambda_sent_dur']

        with torch.no_grad():
            # word-level absolute duration error, converted to milliseconds
            abs_word_dur_error = F.l1_loss(dur_pred, dur_gt, reduction='none')
            abs_word_dur_error = (abs_word_dur_error * nonpadding).sum() / nonpadding.sum()
            abs_word_dur_error = abs_word_dur_error * hparams['hop_size'] / hparams['audio_sample_rate'] * 1000
            losses['abs_word_dur_error'] = abs_word_dur_error
            # sentence-level absolute duration error, in seconds
            sent_dur_p = dur_pred.sum(-1)
            sent_dur_g = dur_gt.sum(-1)
            abs_sent_dur_error = F.l1_loss(sent_dur_p, sent_dur_g, reduction='mean').mean()
            abs_sent_dur_error = abs_sent_dur_error * hparams['hop_size'] / hparams['audio_sample_rate']
            losses['abs_sent_dur_error'] = abs_sent_dur_error

    def validation_step(self, sample, batch_idx):
        outputs = {}
        outputs['losses'], model_out = self.run_model(sample)
        outputs['total_loss'] = sum(outputs['losses'].values())
        outputs['nsamples'] = sample['nsamples']
        outputs = tensors_to_scalars(outputs)
        if self.global_step % hparams['valid_infer_interval'] == 0 \
                and batch_idx < hparams['num_valid_plots']:
            valid_results = self.save_valid_result(sample, batch_idx, model_out)
            wav_gt = valid_results['wav_gt']
            mel_gt = valid_results['mel_gt']
            wav_pred = valid_results['wav_pred']
            mel_pred = valid_results['mel_pred']
            # f0 DTW distance between predicted and ground-truth pitch contours
            f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams)
            f0_gt_, _ = get_pitch(wav_gt, mel_gt, hparams)
            manhattan_distance = lambda x, y: np.abs(x - y)
            dist, cost, acc, path = DTW(f0_pred_, f0_gt_, manhattan_distance)
            outputs['losses']['f0_dtw'] = dist / len(f0_gt_)
        return outputs

    def save_valid_result(self, sample, batch_idx, model_out):
        sr = hparams['audio_sample_rate']
        f0_gt = None
        mel_out = model_out['mel_out']
        if sample.get('f0') is not None:
            f0_gt = denorm_f0(sample['f0'][0].cpu(), sample['uv'][0].cpu())
        self.plot_mel(batch_idx, sample['mels'], mel_out, f0s=f0_gt)

        wav_pred = self.vocoder.spec2wav(mel_out[0].cpu(), f0=f0_gt)
        self.logger.add_audio(f'wav_val_{batch_idx}', wav_pred, self.global_step, sr)
        # with gt duration
        model_out = self.run_model(sample, infer=True, infer_use_gt_dur=True)
        dur_info = self.get_plot_dur_info(sample, model_out)
        del dur_info['dur_pred']
        wav_pred = self.vocoder.spec2wav(model_out['mel_out'][0].cpu(), f0=f0_gt)
        self.logger.add_audio(f'wav_gdur_{batch_idx}', wav_pred, self.global_step, sr)
        self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'][0], f'mel_gdur_{batch_idx}',
                      dur_info=dur_info, f0s=f0_gt)

        # with pred duration
        if not hparams['use_gt_dur']:
            model_out = self.run_model(sample, infer=True, infer_use_gt_dur=False)
            dur_info = self.get_plot_dur_info(sample, model_out)
            self.plot_mel(batch_idx, sample['mels'], model_out['mel_out'][0], f'mel_pdur_{batch_idx}',
                          dur_info=dur_info, f0s=f0_gt)
            wav_pred = self.vocoder.spec2wav(model_out['mel_out'][0].cpu(), f0=f0_gt)
            self.logger.add_audio(f'wav_pdur_{batch_idx}', wav_pred, self.global_step, sr)
        # gt wav
        mel_gt = sample['mels'][0].cpu()
        wav_gt = self.vocoder.spec2wav(mel_gt, f0=f0_gt)
        if self.global_step <= hparams['valid_infer_interval']:
            self.logger.add_audio(f'wav_gt_{batch_idx}', wav_gt, self.global_step, sr)

        # add attn plot
        if self.global_step > 0 and hparams['dur_level'] == 'word':
            self.logger.add_figure(f'attn_{batch_idx}', spec_to_figure(model_out['attn'][0]), self.global_step)

        return {'wav_gt': wav_gt, 'wav_pred': wav_pred, 'mel_gt': mel_gt, 'mel_pred': model_out['mel_out'][0].cpu()}

    def get_attn_stats(self, attn, sample, logging_outputs, prefix=''):
        # focus rate, phone coverage rate, and diagonal focus rate of the attention
        txt_lengths = sample['txt_lengths'].float()
        mel_lengths = sample['mel_lengths'].float()
        src_padding_mask = sample['txt_tokens'].eq(0)
        target_padding_mask = sample['mels'].abs().sum(-1).eq(0)
        src_seg_mask = sample['txt_tokens'].eq(self.seg_idx)
        attn_ks = txt_lengths.float() / mel_lengths.float()

        focus_rate = get_focus_rate(attn, src_padding_mask, target_padding_mask).mean().data
        phone_coverage_rate = get_phone_coverage_rate(
            attn, src_padding_mask, src_seg_mask, target_padding_mask).mean()
        diagonal_focus_rate, diag_mask = get_diagonal_focus_rate(
            attn, attn_ks, mel_lengths, src_padding_mask, target_padding_mask)
        logging_outputs[f'{prefix}fr'] = focus_rate.mean().data
        logging_outputs[f'{prefix}pcr'] = phone_coverage_rate.mean().data
        logging_outputs[f'{prefix}dfr'] = diagonal_focus_rate.mean().data

    def get_plot_dur_info(self, sample, model_out):
        if hparams['dur_level'] == 'word':
            T_txt = sample['word_lengths'].max()
            dur_gt = mel2token_to_dur(sample['mel2word'], T_txt)[0]
            dur_pred = model_out['dur'] if 'dur' in model_out else dur_gt
            txt = sample['ph_words'][0].split(" ")
        else:
            T_txt = sample['txt_tokens'].shape[1]
            dur_gt = mel2token_to_dur(sample['mel2ph'], T_txt)[0]
            dur_pred = model_out['dur'] if 'dur' in model_out else dur_gt
            txt = self.token_encoder.decode(sample['txt_tokens'][0].cpu().numpy())
            txt = txt.split(" ")
        return {'dur_gt': dur_gt, 'dur_pred': dur_pred, 'txt': txt}

    def build_optimizer(self, model):
        optimizer_gen = torch.optim.AdamW(
            self.gen_params,
            lr=hparams['lr'],
            betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']),
            weight_decay=hparams['weight_decay'])

        optimizer_disc = torch.optim.AdamW(
            self.disc_params,
            lr=hparams['disc_lr'],
            betas=(hparams['optimizer_adam_beta1'], hparams['optimizer_adam_beta2']),
            **hparams["discriminator_optimizer_params"]) if len(self.disc_params) > 0 else None

        return [optimizer_gen, optimizer_disc]

    def build_scheduler(self, optimizer):
        return [
            # Generator scheduler. Note: the original referenced `FastSpeechTask`,
            # which is never imported in this module and would raise a NameError;
            # the parent class imported above is used instead.
            FastSpeech2Task.build_scheduler(self, optimizer[0]),
            torch.optim.lr_scheduler.StepLR(optimizer=optimizer[1],  # discriminator scheduler
                                            **hparams["discriminator_scheduler_params"]),
        ]

    def on_before_optimization(self, opt_idx):
        if opt_idx == 0:
            nn.utils.clip_grad_norm_(self.dp_params, hparams['clip_grad_norm'])
            if self.use_bert:
                nn.utils.clip_grad_norm_(self.bert_params, hparams['clip_grad_norm'])
                nn.utils.clip_grad_norm_(self.gen_params_except_bert_and_dp, hparams['clip_grad_norm'])
            else:
                nn.utils.clip_grad_norm_(self.gen_params_except_dp, hparams['clip_grad_norm'])
        else:
            nn.utils.clip_grad_norm_(self.disc_params, hparams["clip_grad_norm"])

    def on_after_optimization(self, epoch, batch_idx, optimizer, optimizer_idx):
        if self.scheduler is not None:
            self.scheduler[0].step(self.global_step // hparams['accumulate_grad_batches'])
            self.scheduler[1].step(self.global_step // hparams['accumulate_grad_batches'])

    ############
    # infer
    ############
    def test_start(self):
        super().test_start()
        if hparams.get('save_attn', False):
            os.makedirs(f'{self.gen_dir}/attn', exist_ok=True)
        self.model.store_inverse_all()

    def test_step(self, sample, batch_idx):
        assert sample['txt_tokens'].shape[0] == 1, 'only support batch_size=1 in inference'
        outputs = self.run_model(sample, infer=True)
        text = sample['text'][0]
        item_name = sample['item_name'][0]
        tokens = sample['txt_tokens'][0].cpu().numpy()
        mel_gt = sample['mels'][0].cpu().numpy()
        mel_pred = outputs['mel_out'][0].cpu().numpy()
        mel2ph = sample['mel2ph'][0].cpu().numpy()
        mel2ph_pred = None
        str_phs = self.token_encoder.decode(tokens, strip_padding=True)
        base_fn = f'[{batch_idx:06d}][{item_name.replace("%", "_")}][%s]'
        if text is not None:
            base_fn += text.replace(":", "$3A")[:80]
        base_fn = base_fn.replace(' ', '_')
        gen_dir = self.gen_dir
        wav_pred = self.vocoder.spec2wav(mel_pred)
        self.saving_result_pool.add_job(self.save_result, args=[
            wav_pred, mel_pred, base_fn % 'P', gen_dir, str_phs, mel2ph_pred])
        # Synthesize the ground-truth wav unconditionally: the pitch-DTW f0 dump
        # below needs it even when save_gt is off (the original only defined
        # wav_gt inside the save_gt branch, causing a NameError otherwise).
        wav_gt = self.vocoder.spec2wav(mel_gt)
        if hparams['save_gt']:
            self.saving_result_pool.add_job(self.save_result, args=[
                wav_gt, mel_gt, base_fn % 'G', gen_dir, str_phs, mel2ph])
        if hparams.get('save_attn', False):
            attn = outputs['attn'][0].cpu().numpy()
            np.save(f'{gen_dir}/attn/{item_name}.npy', attn)
        # save f0 for pitch dtw
        f0_pred_, _ = get_pitch(wav_pred, mel_pred, hparams)
        f0_gt_, _ = get_pitch(wav_gt, mel_gt, hparams)
        np.save(f'{gen_dir}/f0/{item_name}.npy', f0_pred_)
        np.save(f'{gen_dir}/f0/{item_name}_gt.npy', f0_gt_)

        print(f"Pred_shape: {mel_pred.shape}, gt_shape: {mel_gt.shape}")
        return {
            'item_name': item_name,
            'text': text,
            'ph_tokens': self.token_encoder.decode(tokens.tolist()),
            'wav_fn_pred': base_fn % 'P',
            'wav_fn_gt': base_fn % 'G',
        }
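The `_training_step` above alternates two optimizers with least-squares GAN targets: the generator drives the discriminator's scores on fake mels towards 1, while the discriminator drives real mels towards 1 and detached fakes towards 0. A stripped-down sketch of that pattern; the toy `nn.Linear` modules are stand-ins, not the actual PortaSpeech or multi-window discriminator classes:

```python
# Minimal LSGAN alternation mirroring _training_step; toy modules are stand-ins.
import torch
import torch.nn as nn

gen = nn.Linear(16, 80)   # stand-in generator: features -> "mel" frame
disc = nn.Linear(80, 1)   # stand-in discriminator: "mel" -> realness score
mse = nn.MSELoss()
opt_g = torch.optim.AdamW(gen.parameters(), lr=2e-4)
opt_d = torch.optim.AdamW(disc.parameters(), lr=2e-4)

feats, mel_real = torch.randn(8, 16), torch.randn(8, 80)

# Generator step: push D(fake) towards 1 (the 'a'/'ac' losses above).
mel_fake = gen(feats)
p_fake = disc(mel_fake)
loss_g = mse(p_fake, torch.ones_like(p_fake))
opt_g.zero_grad(); loss_g.backward(); opt_g.step()

# Discriminator step: D(real) -> 1, D(fake.detach()) -> 0 (the 'r'/'f' losses);
# the detach mirrors reusing the cached model_out_gt tensors.
p_real, p_fake = disc(mel_real), disc(mel_fake.detach())
loss_d = mse(p_real, torch.ones_like(p_real)) + mse(p_fake, torch.zeros_like(p_fake))
opt_d.zero_grad(); loss_d.backward(); opt_d.step()
```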
spaces/AchyuthGamer/OpenGPT-Chat-UI/src/lib/utils/share.ts
DELETED
@@ -1,7 +0,0 @@
export function share(url: string, title: string) {
  if (navigator.share) {
    navigator.share({ url, title });
  } else {
    prompt("Copy this public url to share:", url);
  }
}
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/ninepatch.js
DELETED
@@ -1,2 +0,0 @@
import NinePatch from './gameobjects/rendertexture/ninepatch/NinePatch.js';
export default NinePatch;
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/fileselectorbutton/FileSelectorButton.d.ts
DELETED
@@ -1,45 +0,0 @@
import Label from '../label/Label';

export default FileSelectorButton;

declare namespace FileSelectorButton {
    interface IConfig extends Label.IConfig {
        accept?: string,
        multiple?: boolean,
    }
}

declare class FileSelectorButton extends Label {
    constructor(
        scene: Phaser.Scene,
        config?: FileSelectorButton.IConfig
    );

    readonly files: File[];

    setAccept(accept: string): this;

    setMultiple(multiple?: boolean): this;

    loadFile(
        file: File,
        loaderType: string,
        key: string,
        cacheType?: string
    ): this;

    loadFile(
        file: File,
        loaderType: string,
        key: string,
        cacheType?: string,
        onComplete?: (data: any) => void
    ): this;

    loadFilePromise(
        file: File,
        loaderType: string,
        key: string,
        cacheType?: string
    ): Promise<any>;
}
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/pages/Pages.d.ts
DELETED
@@ -1,72 +0,0 @@
// import * as Phaser from 'phaser';
import OverlapSizer from '../overlapsizer/OverlapSizer';


export default Pages;

declare namespace Pages {

    type AlignTypes = number | 'center' | 'left' | 'right' | 'top' | 'bottom' |
        'left-top' | 'left-center' | 'left-bottom' |
        'center-top' | 'center-center' | 'center-bottom' |
        'right-top' | 'right-center' | 'right-bottom';

    type PaddingTypes = number |
        {
            left?: number,
            right?: number,
            top?: number,
            bottom?: number,
        };

    interface IConfig extends OverlapSizer.IConfig {
        fadeIn?: number,

        swapMode?: 0 | 1 | 'invisible' | 'destroy',
    }

}

declare class Pages extends OverlapSizer {
    constructor(
        scene: Phaser.Scene,
        config?: Pages.IConfig
    );

    setSwapMode(
        mode: 0 | 1 | 'invisible' | 'destroy'
    ): this;

    addPage(
        gameObject: Phaser.GameObjects.GameObject,
        config?: {
            key?: string,

            align?: Pages.AlignTypes,

            padding?: Pages.PaddingTypes,

            expand: boolean |
            {
                width?: boolean,
                height?: boolean,
            },

            minWidth?: number,

            minHeight?: number
        }
    ): this;

    swapPage(
        key: string,
        fadeInDuration?: number
    ): this;
    currentKey: string;
    readonly previousKey: string;
    keys: string[];

    getPage(key: string): Phaser.GameObjects.GameObject;
    readonly currentPage: Phaser.GameObjects.GameObject;
    readonly previousPage: Phaser.GameObjects.GameObject;
}
spaces/Al-Chan/Vits_League_of_Legends_Yuumi_TTS/text/english.py
DELETED
@@ -1,188 +0,0 @@
""" from https://github.com/keithito/tacotron """

'''
Cleaners are transformations that run over the input text at both training and eval time.

Cleaners can be selected by passing a comma-delimited list of cleaner names as the "cleaners"
hyperparameter. Some cleaners are English-specific. You'll typically want to use:
  1. "english_cleaners" for English text
  2. "transliteration_cleaners" for non-English text that can be transliterated to ASCII using
     the Unidecode library (https://pypi.python.org/pypi/Unidecode)
  3. "basic_cleaners" if you do not want to transliterate (in this case, you should also update
     the symbols in symbols.py to match your data).
'''


import re
import inflect
from unidecode import unidecode
import eng_to_ipa as ipa

_inflect = inflect.engine()
# Regular expressions matching numbers and currency amounts:
_comma_number_re = re.compile(r'([0-9][0-9\,]+[0-9])')
_decimal_number_re = re.compile(r'([0-9]+\.[0-9]+)')
_pounds_re = re.compile(r'£([0-9\,]*[0-9]+)')
_dollars_re = re.compile(r'\$([0-9\.\,]*[0-9]+)')
_ordinal_re = re.compile(r'[0-9]+(st|nd|rd|th)')
_number_re = re.compile(r'[0-9]+')

# List of (regular expression, replacement) pairs for abbreviations:
_abbreviations = [(re.compile('\\b%s\\.' % x[0], re.IGNORECASE), x[1]) for x in [
    ('mrs', 'misess'),
    ('mr', 'mister'),
    ('dr', 'doctor'),
    ('st', 'saint'),
    ('co', 'company'),
    ('jr', 'junior'),
    ('maj', 'major'),
    ('gen', 'general'),
    ('drs', 'doctors'),
    ('rev', 'reverend'),
    ('lt', 'lieutenant'),
    ('hon', 'honorable'),
    ('sgt', 'sergeant'),
    ('capt', 'captain'),
    ('esq', 'esquire'),
    ('ltd', 'limited'),
    ('col', 'colonel'),
    ('ft', 'fort'),
]]


# List of (ipa, lazy ipa) pairs:
_lazy_ipa = [(re.compile('%s' % x[0]), x[1]) for x in [
    ('r', 'ɹ'),
    ('æ', 'e'),
    ('ɑ', 'a'),
    ('ɔ', 'o'),
    ('ð', 'z'),
    ('θ', 's'),
    ('ɛ', 'e'),
    ('ɪ', 'i'),
    ('ʊ', 'u'),
    ('ʒ', 'ʥ'),
    ('ʤ', 'ʥ'),
    ('ˈ', '↓'),
]]

# List of (ipa, lazy ipa2) pairs:
_lazy_ipa2 = [(re.compile('%s' % x[0]), x[1]) for x in [
    ('r', 'ɹ'),
    ('ð', 'z'),
    ('θ', 's'),
    ('ʒ', 'ʑ'),
    ('ʤ', 'dʑ'),
    ('ˈ', '↓'),
]]

# List of (ipa, ipa2) pairs
_ipa_to_ipa2 = [(re.compile('%s' % x[0]), x[1]) for x in [
    ('r', 'ɹ'),
    ('ʤ', 'dʒ'),
    ('ʧ', 'tʃ')
]]


def expand_abbreviations(text):
    for regex, replacement in _abbreviations:
        text = re.sub(regex, replacement, text)
    return text


def collapse_whitespace(text):
    return re.sub(r'\s+', ' ', text)


def _remove_commas(m):
    return m.group(1).replace(',', '')


def _expand_decimal_point(m):
    return m.group(1).replace('.', ' point ')


def _expand_dollars(m):
    match = m.group(1)
    parts = match.split('.')
    if len(parts) > 2:
        return match + ' dollars'  # Unexpected format
    dollars = int(parts[0]) if parts[0] else 0
    cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0
    if dollars and cents:
        dollar_unit = 'dollar' if dollars == 1 else 'dollars'
        cent_unit = 'cent' if cents == 1 else 'cents'
        return '%s %s, %s %s' % (dollars, dollar_unit, cents, cent_unit)
    elif dollars:
        dollar_unit = 'dollar' if dollars == 1 else 'dollars'
        return '%s %s' % (dollars, dollar_unit)
    elif cents:
        cent_unit = 'cent' if cents == 1 else 'cents'
        return '%s %s' % (cents, cent_unit)
    else:
        return 'zero dollars'


def _expand_ordinal(m):
    return _inflect.number_to_words(m.group(0))


def _expand_number(m):
    num = int(m.group(0))
    if num > 1000 and num < 3000:
        if num == 2000:
            return 'two thousand'
        elif num > 2000 and num < 2010:
            return 'two thousand ' + _inflect.number_to_words(num % 100)
        elif num % 100 == 0:
            return _inflect.number_to_words(num // 100) + ' hundred'
        else:
            return _inflect.number_to_words(num, andword='', zero='oh', group=2).replace(', ', ' ')
    else:
        return _inflect.number_to_words(num, andword='')


def normalize_numbers(text):
    text = re.sub(_comma_number_re, _remove_commas, text)
    text = re.sub(_pounds_re, r'\1 pounds', text)
    text = re.sub(_dollars_re, _expand_dollars, text)
    text = re.sub(_decimal_number_re, _expand_decimal_point, text)
    text = re.sub(_ordinal_re, _expand_ordinal, text)
    text = re.sub(_number_re, _expand_number, text)
    return text


def mark_dark_l(text):
    return re.sub(r'l([^aeiouæɑɔəɛɪʊ ]*(?: |$))', lambda x: 'ɫ'+x.group(1), text)


def english_to_ipa(text):
    text = unidecode(text).lower()
    text = expand_abbreviations(text)
    text = normalize_numbers(text)
    phonemes = ipa.convert(text)
    phonemes = collapse_whitespace(phonemes)
    return phonemes


def english_to_lazy_ipa(text):
    text = english_to_ipa(text)
    for regex, replacement in _lazy_ipa:
        text = re.sub(regex, replacement, text)
    return text


def english_to_ipa2(text):
    text = english_to_ipa(text)
    text = mark_dark_l(text)
    for regex, replacement in _ipa_to_ipa2:
        text = re.sub(regex, replacement, text)
    return text.replace('...', '…')


def english_to_lazy_ipa2(text):
    text = english_to_ipa(text)
    for regex, replacement in _lazy_ipa2:
        text = re.sub(regex, replacement, text)
    return text
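A small usage sketch of the cleaners above, assuming `inflect`, `unidecode`, and `eng_to_ipa` are installed and the module is importable as `text.english` (its path within the space); the example sentence is arbitrary and the exact IPA output depends on the `eng_to_ipa` lexicon:

```python
# Arbitrary example text run through the cleaners defined above.
from text.english import expand_abbreviations, normalize_numbers, english_to_ipa2

text = "Dr. Smith paid $2.50 on the 3rd."
print(expand_abbreviations(text))        # "dr." -> "doctor" (case-insensitive)
print(normalize_numbers("$2.50 and 3rd"))  # "two dollars, fifty cents and third"
print(english_to_ipa2(text))             # full pipeline -> IPA phoneme string
```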
spaces/AlekseyKorshuk/michellejieli-NSFW_text_classifier/README.md
DELETED
@@ -1,12 +0,0 @@
---
title: Michellejieli-NSFW Text Classifier
emoji: 🌍
colorFrom: red
colorTo: gray
sdk: gradio
sdk_version: 3.23.0
app_file: app.py
pinned: false
---

Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/docs/source/en/api/pipelines/ddpm.md
DELETED
@@ -1,35 +0,0 @@
<!--Copyright 2023 The HuggingFace Team. All rights reserved.

Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at

http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
-->

# DDPM

[Denoising Diffusion Probabilistic Models](https://huggingface.co/papers/2006.11239) (DDPM) by Jonathan Ho, Ajay Jain and Pieter Abbeel proposes a diffusion-based model of the same name. In the 🤗 Diffusers library, DDPM refers to the *discrete denoising scheduler* from the paper as well as the pipeline.

The abstract from the paper is:

*We present high quality image synthesis results using diffusion probabilistic models, a class of latent variable models inspired by considerations from nonequilibrium thermodynamics. Our best results are obtained by training on a weighted variational bound designed according to a novel connection between diffusion probabilistic models and denoising score matching with Langevin dynamics, and our models naturally admit a progressive lossy decompression scheme that can be interpreted as a generalization of autoregressive decoding. On the unconditional CIFAR10 dataset, we obtain an Inception score of 9.46 and a state-of-the-art FID score of 3.17. On 256x256 LSUN, we obtain sample quality similar to ProgressiveGAN.*

The original codebase can be found at [hojonathanho/diffusion](https://github.com/hojonathanho/diffusion).

<Tip>

Make sure to check out the Schedulers [guide](/using-diffusers/schedulers) to learn how to explore the tradeoff between scheduler speed and quality, and see the [reuse components across pipelines](/using-diffusers/loading#reuse-components-across-pipelines) section to learn how to efficiently load the same components into multiple pipelines.

</Tip>

## DDPMPipeline
[[autodoc]] DDPMPipeline
	- all
	- __call__

## ImagePipelineOutput
[[autodoc]] pipelines.ImagePipelineOutput
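As a quick illustration of the pipeline documented above, a minimal unconditional sampling sketch; `google/ddpm-cat-256` is the commonly used example checkpoint, and any DDPM checkpoint can be swapped in:

```python
# Minimal unconditional sampling with DDPMPipeline.
from diffusers import DDPMPipeline

pipeline = DDPMPipeline.from_pretrained("google/ddpm-cat-256")
result = pipeline(num_inference_steps=1000)  # returns an ImagePipelineOutput
result.images[0].save("ddpm_sample.png")     # .images is a list of PIL images
```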
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/dependency_versions_check.py
DELETED
@@ -1,47 +0,0 @@
# Copyright 2023 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys

from .dependency_versions_table import deps
from .utils.versions import require_version, require_version_core


# define which module versions we always want to check at run time
# (usually the ones defined in `install_requires` in setup.py)
#
# order specific notes:
# - tqdm must be checked before tokenizers

pkgs_to_check_at_runtime = "python tqdm regex requests packaging filelock numpy tokenizers".split()
if sys.version_info < (3, 7):
    pkgs_to_check_at_runtime.append("dataclasses")
if sys.version_info < (3, 8):
    pkgs_to_check_at_runtime.append("importlib_metadata")

for pkg in pkgs_to_check_at_runtime:
    if pkg in deps:
        if pkg == "tokenizers":
            # must be loaded here, or else tqdm check may fail
            from .utils import is_tokenizers_available

            if not is_tokenizers_available():
                continue  # not required, check version only if installed

        require_version_core(deps[pkg])
    else:
        raise ValueError(f"can't find {pkg} in {deps.keys()}, check dependency_versions_table.py")


def dep_version_check(pkg, hint=None):
    require_version(deps[pkg], hint)
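For reference, a sketch of how the helper at the bottom might be used from other modules; `"numpy"` is just one example entry from the deps table, and the hint string is an illustrative suggestion:

```python
# Example use of dep_version_check; "numpy" is one entry in the deps table.
from diffusers.dependency_versions_check import dep_version_check

# Raises ImportError if the installed numpy does not satisfy the
# requirement pinned in dependency_versions_table.py.
dep_version_check("numpy", hint="try: pip install -U numpy")
```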
spaces/Andy1621/uniformer_image_detection/configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py
DELETED
@@ -1,27 +0,0 @@
_base_ = './fovea_r50_fpn_4x4_1x_coco.py'
model = dict(
    pretrained='torchvision://resnet101',
    backbone=dict(depth=101),
    bbox_head=dict(
        with_deform=True,
        norm_cfg=dict(type='GN', num_groups=32, requires_grad=True)))
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True),
    dict(
        type='Resize',
        img_scale=[(1333, 640), (1333, 800)],
        multiscale_mode='value',
        keep_ratio=True),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size_divisor=32),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
data = dict(train=dict(pipeline=train_pipeline))
# learning policy
lr_config = dict(step=[16, 22])
runner = dict(type='EpochBasedRunner', max_epochs=24)
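These mmdetection configs are plain Python files composed through `_base_` inheritance, so each file only states its overrides. A small sketch of loading and inspecting one with mmcv's config loader; the relative path assumes an mmdetection checkout as the working directory:

```python
# Load an inherited mmdetection config and inspect the merged result.
from mmcv import Config  # mmcv's loader resolves the _base_ chain

cfg = Config.fromfile(
    'configs/foveabox/fovea_align_r101_fpn_gn-head_mstrain_640-800_4x4_2x_coco.py')
print(cfg.model.backbone.depth)   # 101 -- override merged over the r50 base
print(cfg.runner.max_epochs)      # 24  -- set at the bottom of this file
print(cfg.lr_config.step)         # [16, 22]
```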
spaces/Andy1621/uniformer_image_detection/configs/fpg/faster_rcnn_r50_fpn_crop640_50e_coco.py
DELETED
@@ -1,68 +0,0 @@
_base_ = [
    '../_base_/models/faster_rcnn_r50_fpn.py',
    '../_base_/datasets/coco_detection.py',
    '../_base_/schedules/schedule_1x.py', '../_base_/default_runtime.py'
]
norm_cfg = dict(type='BN', requires_grad=True)
model = dict(
    backbone=dict(norm_cfg=norm_cfg, norm_eval=False),
    neck=dict(norm_cfg=norm_cfg),
    roi_head=dict(bbox_head=dict(norm_cfg=norm_cfg)))
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
    mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
    dict(
        type='Resize',
        img_scale=(640, 640),
        ratio_range=(0.8, 1.2),
        keep_ratio=True),
    dict(type='RandomCrop', crop_size=(640, 640)),
    dict(type='RandomFlip', flip_ratio=0.5),
    dict(type='Normalize', **img_norm_cfg),
    dict(type='Pad', size=(640, 640)),
    dict(type='DefaultFormatBundle'),
    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels']),
]
test_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(
        type='MultiScaleFlipAug',
        img_scale=(640, 640),
        flip=False,
        transforms=[
            dict(type='Resize', keep_ratio=True),
            dict(type='RandomFlip'),
            dict(type='Normalize', **img_norm_cfg),
            dict(type='Pad', size_divisor=64),
            dict(type='ImageToTensor', keys=['img']),
            dict(type='Collect', keys=['img']),
        ])
]
data = dict(
    samples_per_gpu=8,
    workers_per_gpu=4,
    train=dict(pipeline=train_pipeline),
    val=dict(pipeline=test_pipeline),
    test=dict(pipeline=test_pipeline))
# optimizer
optimizer = dict(
    type='SGD',
    lr=0.08,
    momentum=0.9,
    weight_decay=0.0001,
    paramwise_cfg=dict(norm_decay_mult=0, bypass_duplicate=True))
optimizer_config = dict(grad_clip=None)
# learning policy
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=1000,
    warmup_ratio=0.1,
    step=[30, 40])
# runtime settings
runner = dict(max_epochs=50)
evaluation = dict(interval=2)
spaces/Andy1621/uniformer_image_detection/configs/groie/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_groie_1x_coco.py
DELETED
@@ -1,45 +0,0 @@
_base_ = '../gcnet/mask_rcnn_r101_fpn_syncbn-backbone_r4_gcb_c3-c5_1x_coco.py'
# model settings
model = dict(
    roi_head=dict(
        bbox_roi_extractor=dict(
            type='GenericRoIExtractor',
            aggregation='sum',
            roi_layer=dict(type='RoIAlign', output_size=7, sampling_ratio=2),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32],
            pre_cfg=dict(
                type='ConvModule',
                in_channels=256,
                out_channels=256,
                kernel_size=5,
                padding=2,
                inplace=False,
            ),
            post_cfg=dict(
                type='GeneralizedAttention',
                in_channels=256,
                spatial_range=-1,
                num_heads=6,
                attention_type='0100',
                kv_stride=2)),
        mask_roi_extractor=dict(
            type='GenericRoIExtractor',
            roi_layer=dict(type='RoIAlign', output_size=14, sampling_ratio=2),
            out_channels=256,
            featmap_strides=[4, 8, 16, 32],
            pre_cfg=dict(
                type='ConvModule',
                in_channels=256,
                out_channels=256,
                kernel_size=5,
                padding=2,
                inplace=False,
            ),
            post_cfg=dict(
                type='GeneralizedAttention',
                in_channels=256,
                spatial_range=-1,
                num_heads=6,
                attention_type='0100',
                kv_stride=2))))
spaces/Andy1621/uniformer_image_detection/configs/yolact/yolact_r50_1x8_coco.py
DELETED
@@ -1,160 +0,0 @@
-_base_ = '../_base_/default_runtime.py'
-
-# model settings
-img_size = 550
-model = dict(
-    type='YOLACT',
-    pretrained='torchvision://resnet50',
-    backbone=dict(
-        type='ResNet',
-        depth=50,
-        num_stages=4,
-        out_indices=(0, 1, 2, 3),
-        frozen_stages=-1,  # do not freeze stem
-        norm_cfg=dict(type='BN', requires_grad=True),
-        norm_eval=False,  # update the statistics of bn
-        zero_init_residual=False,
-        style='pytorch'),
-    neck=dict(
-        type='FPN',
-        in_channels=[256, 512, 1024, 2048],
-        out_channels=256,
-        start_level=1,
-        add_extra_convs='on_input',
-        num_outs=5,
-        upsample_cfg=dict(mode='bilinear')),
-    bbox_head=dict(
-        type='YOLACTHead',
-        num_classes=80,
-        in_channels=256,
-        feat_channels=256,
-        anchor_generator=dict(
-            type='AnchorGenerator',
-            octave_base_scale=3,
-            scales_per_octave=1,
-            base_sizes=[8, 16, 32, 64, 128],
-            ratios=[0.5, 1.0, 2.0],
-            strides=[550.0 / x for x in [69, 35, 18, 9, 5]],
-            centers=[(550 * 0.5 / x, 550 * 0.5 / x)
-                     for x in [69, 35, 18, 9, 5]]),
-        bbox_coder=dict(
-            type='DeltaXYWHBBoxCoder',
-            target_means=[.0, .0, .0, .0],
-            target_stds=[0.1, 0.1, 0.2, 0.2]),
-        loss_cls=dict(
-            type='CrossEntropyLoss',
-            use_sigmoid=False,
-            reduction='none',
-            loss_weight=1.0),
-        loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.5),
-        num_head_convs=1,
-        num_protos=32,
-        use_ohem=True),
-    mask_head=dict(
-        type='YOLACTProtonet',
-        in_channels=256,
-        num_protos=32,
-        num_classes=80,
-        max_masks_to_train=100,
-        loss_mask_weight=6.125),
-    segm_head=dict(
-        type='YOLACTSegmHead',
-        num_classes=80,
-        in_channels=256,
-        loss_segm=dict(
-            type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0)),
-    # training and testing settings
-    train_cfg=dict(
-        assigner=dict(
-            type='MaxIoUAssigner',
-            pos_iou_thr=0.5,
-            neg_iou_thr=0.4,
-            min_pos_iou=0.,
-            ignore_iof_thr=-1,
-            gt_max_assign_all=False),
-        # smoothl1_beta=1.,
-        allowed_border=-1,
-        pos_weight=-1,
-        neg_pos_ratio=3,
-        debug=False),
-    test_cfg=dict(
-        nms_pre=1000,
-        min_bbox_size=0,
-        score_thr=0.05,
-        iou_thr=0.5,
-        top_k=200,
-        max_per_img=100))
-# dataset settings
-dataset_type = 'CocoDataset'
-data_root = 'data/coco/'
-img_norm_cfg = dict(
-    mean=[123.68, 116.78, 103.94], std=[58.40, 57.12, 57.38], to_rgb=True)
-train_pipeline = [
-    dict(type='LoadImageFromFile', to_float32=True),
-    dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
-    dict(type='FilterAnnotations', min_gt_bbox_wh=(4.0, 4.0)),
-    dict(
-        type='PhotoMetricDistortion',
-        brightness_delta=32,
-        contrast_range=(0.5, 1.5),
-        saturation_range=(0.5, 1.5),
-        hue_delta=18),
-    dict(
-        type='Expand',
-        mean=img_norm_cfg['mean'],
-        to_rgb=img_norm_cfg['to_rgb'],
-        ratio_range=(1, 4)),
-    dict(
-        type='MinIoURandomCrop',
-        min_ious=(0.1, 0.3, 0.5, 0.7, 0.9),
-        min_crop_size=0.3),
-    dict(type='Resize', img_scale=(img_size, img_size), keep_ratio=False),
-    dict(type='Normalize', **img_norm_cfg),
-    dict(type='RandomFlip', flip_ratio=0.5),
-    dict(type='DefaultFormatBundle'),
-    dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
-]
-test_pipeline = [
-    dict(type='LoadImageFromFile'),
-    dict(
-        type='MultiScaleFlipAug',
-        img_scale=(img_size, img_size),
-        flip=False,
-        transforms=[
-            dict(type='Resize', keep_ratio=False),
-            dict(type='Normalize', **img_norm_cfg),
-            dict(type='ImageToTensor', keys=['img']),
-            dict(type='Collect', keys=['img']),
-        ])
-]
-data = dict(
-    samples_per_gpu=8,
-    workers_per_gpu=4,
-    train=dict(
-        type=dataset_type,
-        ann_file=data_root + 'annotations/instances_train2017.json',
-        img_prefix=data_root + 'train2017/',
-        pipeline=train_pipeline),
-    val=dict(
-        type=dataset_type,
-        ann_file=data_root + 'annotations/instances_val2017.json',
-        img_prefix=data_root + 'val2017/',
-        pipeline=test_pipeline),
-    test=dict(
-        type=dataset_type,
-        ann_file=data_root + 'annotations/instances_val2017.json',
-        img_prefix=data_root + 'val2017/',
-        pipeline=test_pipeline))
-# optimizer
-optimizer = dict(type='SGD', lr=1e-3, momentum=0.9, weight_decay=5e-4)
-optimizer_config = dict()
-# learning policy
-lr_config = dict(
-    policy='step',
-    warmup='linear',
-    warmup_iters=500,
-    warmup_ratio=0.1,
-    step=[20, 42, 49, 52])
-runner = dict(type='EpochBasedRunner', max_epochs=55)
-cudnn_benchmark = True
-evaluation = dict(metric=['bbox', 'segm'])
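Aside: a quick check (not part of the diff) of the anchor arithmetic the YOLACT config uses above. Strides are the 550-pixel input size divided by each feature-map size, and anchor centers sit at half a stride.

```python
# Reproduce the stride/center expressions from the config's anchor_generator.
feat_sizes = [69, 35, 18, 9, 5]
strides = [550.0 / x for x in feat_sizes]
centers = [(550 * 0.5 / x, 550 * 0.5 / x) for x in feat_sizes]
print([round(s, 2) for s in strides])  # [7.97, 15.71, 30.56, 61.11, 110.0]
print(centers[0])                      # roughly (3.99, 3.99)
```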
spaces/Andy1621/uniformer_image_segmentation/configs/deeplabv3plus/deeplabv3plus_r101-d8_480x480_40k_pascal_context.py
DELETED
@@ -1,2 +0,0 @@
-_base_ = './deeplabv3plus_r50-d8_480x480_40k_pascal_context.py'
-model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
spaces/Andy1621/uniformer_image_segmentation/configs/psanet/psanet_r101-d8_769x769_80k_cityscapes.py
DELETED
@@ -1,2 +0,0 @@
-_base_ = './psanet_r50-d8_769x769_80k_cityscapes.py'
-model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
spaces/Andy1621/uniformer_image_segmentation/configs/psanet/psanet_r50-d8_512x1024_40k_cityscapes.py
DELETED
@@ -1,4 +0,0 @@
-_base_ = [
-    '../_base_/models/psanet_r50-d8.py', '../_base_/datasets/cityscapes.py',
-    '../_base_/default_runtime.py', '../_base_/schedules/schedule_40k.py'
-]
spaces/Andy1621/uniformer_image_segmentation/configs/pspnet/pspnet_r50-d8_480x480_40k_pascal_context_59.py
DELETED
@@ -1,10 +0,0 @@
-_base_ = [
-    '../_base_/models/pspnet_r50-d8.py',
-    '../_base_/datasets/pascal_context_59.py', '../_base_/default_runtime.py',
-    '../_base_/schedules/schedule_40k.py'
-]
-model = dict(
-    decode_head=dict(num_classes=59),
-    auxiliary_head=dict(num_classes=59),
-    test_cfg=dict(mode='slide', crop_size=(480, 480), stride=(320, 320)))
-optimizer = dict(type='SGD', lr=0.004, momentum=0.9, weight_decay=0.0001)
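Aside: a small illustration (an assumption about slide inference, not code from the repo) of what the `test_cfg` above implies: inference runs on 480x480 crops taken every 320 pixels along each axis.

```python
# Count sliding windows per axis for a given input size; hypothetical sizes.
import math

def num_windows(size, crop=480, stride=320):
    return max(math.ceil((size - crop) / stride) + 1, 1)

h, w = 512, 1024  # hypothetical input resolution
print(num_windows(h), num_windows(w))  # 2 windows vertically, 3 horizontally
```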
spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/multimodal/multimodal_embedder.py
DELETED
@@ -1,178 +0,0 @@
-import base64
-import re
-from dataclasses import dataclass
-from io import BytesIO
-from typing import Any, List, Optional
-
-import torch
-from PIL import Image
-
-from extensions.multimodal.pipeline_loader import load_pipeline
-from modules import shared
-from modules.logging_colors import logger
-from modules.text_generation import encode, get_max_prompt_length
-
-
-@dataclass
-class PromptPart:
-    text: str
-    image: Optional[Image.Image] = None
-    is_image: bool = False
-    input_ids: Optional[torch.Tensor] = None
-    embedding: Optional[torch.Tensor] = None
-
-
-class MultimodalEmbedder:
-    def __init__(self, params: dict):
-        pipeline, source = load_pipeline(params)
-        self.pipeline = pipeline
-        logger.info(f'Multimodal: loaded pipeline {self.pipeline.name()} from pipelines/{source} ({self.pipeline.__class__.__name__})')
-
-    def _split_prompt(self, prompt: str, load_images: bool = False) -> List[PromptPart]:
-        """Splits a prompt into a list of `PromptParts` to separate image data from text.
-        It will also append `image_start` and `image_end` before and after the image, and optionally parse and load the images,
-        if `load_images` is `True`.
-        """
-        parts: List[PromptPart] = []
-        curr = 0
-        while True:
-            match = re.search(r'<img src="data:image/jpeg;base64,([A-Za-z0-9+/=]+)">', prompt[curr:])
-            if match is None:
-                # no more image tokens, append the rest of the prompt
-                if curr > 0:
-                    # add image end token after last image
-                    parts.append(PromptPart(text=self.pipeline.image_end() + prompt[curr:]))
-                else:
-                    parts.append(PromptPart(text=prompt))
-                break
-            # found an image, append image start token to the text
-            if match.start() > 0:
-                parts.append(PromptPart(text=prompt[curr:curr + match.start()] + self.pipeline.image_start()))
-            else:
-                parts.append(PromptPart(text=self.pipeline.image_start()))
-            # append the image
-            parts.append(PromptPart(
-                text=match.group(0),
-                image=Image.open(BytesIO(base64.b64decode(match.group(1)))) if load_images else None,
-                is_image=True
-            ))
-            curr += match.end()
-        return parts
-
-    def _len_in_tokens_prompt_parts(self, parts: List[PromptPart]) -> int:
-        """Total length in tokens of all `parts`"""
-        tokens = 0
-        for part in parts:
-            if part.is_image:
-                tokens += self.pipeline.num_image_embeds()
-            elif part.input_ids is not None:
-                tokens += len(part.input_ids)
-            else:
-                tokens += len(encode(part.text)[0])
-        return tokens
-
-    def len_in_tokens(self, prompt: str) -> int:
-        """Total length in tokens for a given text `prompt`"""
-        parts = self._split_prompt(prompt, False)
-        return self._len_in_tokens_prompt_parts(parts)
-
-    def _encode_single_text(self, part: PromptPart, add_bos_token: bool) -> PromptPart:
-        """Encode a single prompt `part` to `input_ids`. Returns a `PromptPart`"""
-        if part.is_image:
-            placeholders = torch.ones((self.pipeline.num_image_embeds())) * self.pipeline.placeholder_token_id()
-            part.input_ids = placeholders.to(shared.model.device, dtype=torch.int64)
-        else:
-            part.input_ids = encode(part.text, add_bos_token=add_bos_token)[0].to(shared.model.device, dtype=torch.int64)
-        return part
-
-    @staticmethod
-    def _num_images(parts: List[PromptPart]) -> int:
-        count = 0
-        for part in parts:
-            if part.is_image:
-                count += 1
-        return count
-
-    def _encode_text(self, state, parts: List[PromptPart]) -> List[PromptPart]:
-        """Encode text to token_ids, also truncate the prompt, if necessary.
-
-        The chat/instruct mode should make prompts that fit in get_max_prompt_length, but if max_new_tokens are set
-        such that the context + min_rows don't fit, we can get a prompt which is too long.
-        We can't truncate image embeddings, as it leads to broken generation, so remove the images instead and warn the user
-        """
-        encoded: List[PromptPart] = []
-        for i, part in enumerate(parts):
-            encoded.append(self._encode_single_text(part, i == 0 and state['add_bos_token']))
-
-        # truncation:
-        max_len = get_max_prompt_length(state)
-        removed_images = 0
-
-        # 1. remove entire text/image blocks
-        while self._len_in_tokens_prompt_parts(encoded[1:]) > max_len:
-            if encoded[0].is_image:
-                removed_images += 1
-            encoded = encoded[1:]
-
-        # 2. check if the last prompt part doesn't need to get truncated
-        if self._len_in_tokens_prompt_parts(encoded) > max_len:
-            if encoded[0].is_image:
-                # don't truncate image embeddings, just remove the image, otherwise generation will be broken
-                removed_images += 1
-                encoded = encoded[1:]
-            elif len(encoded) > 1 and encoded[0].text.endswith(self.pipeline.image_start()):
-                # see if we can keep image_start token
-                len_image_start = len(encode(self.pipeline.image_start(), add_bos_token=state['add_bos_token'])[0])
-                if self._len_in_tokens_prompt_parts(encoded[1:]) + len_image_start > max_len:
-                    # we can't -> remove this text, and the image
-                    encoded = encoded[2:]
-                    removed_images += 1
-                else:
-                    # we can -> just truncate the text
-                    trunc_len = self._len_in_tokens_prompt_parts(encoded) - max_len
-                    encoded[0].input_ids = encoded[0].input_ids[trunc_len:]
-            elif len(encoded) > 0:
-                # only one text left, truncate it normally
-                trunc_len = self._len_in_tokens_prompt_parts(encoded) - max_len
-                encoded[0].input_ids = encoded[0].input_ids[trunc_len:]
-
-        # notify user if we truncated an image
-        if removed_images > 0:
-            logger.warning(f"Multimodal: removed {removed_images} image(s) from prompt. Try decreasing max_new_tokens if generation is broken")
-
-        return encoded
-
-    def _embed(self, parts: List[PromptPart]) -> List[PromptPart]:
-        # batch images
-        image_indicies = [i for i, part in enumerate(parts) if part.is_image]
-        embedded = self.pipeline.embed_images([parts[i].image for i in image_indicies])
-        for i, embeds in zip(image_indicies, embedded):
-            parts[i].embedding = embeds
-        # embed text
-        for (i, part) in enumerate(parts):
-            if not part.is_image:
-                parts[i].embedding = self.pipeline.embed_tokens(part.input_ids)
-        return parts
-
-    def _remove_old_images(self, parts: List[PromptPart], params: dict) -> List[PromptPart]:
-        if params['add_all_images_to_prompt']:
-            return parts
-        already_added = False
-        for i, part in reversed(list(enumerate(parts))):
-            if part.is_image:
-                if already_added:
-                    parts[i].embedding = self.pipeline.placeholder_embeddings()
-                else:
-                    already_added = True
-        return parts
-
-    def forward(self, prompt: str, state: Any, params: dict):
-        prompt_parts = self._split_prompt(prompt, True)
-        prompt_parts = self._encode_text(state, prompt_parts)
-        prompt_parts = self._embed(prompt_parts)
-        prompt_parts = self._remove_old_images(prompt_parts, params)
-        embeds = tuple(part.embedding for part in prompt_parts)
-        ids = tuple(part.input_ids for part in prompt_parts)
-        input_embeds = torch.cat(embeds, dim=0)
-        input_ids = torch.cat(ids, dim=0)
-        return prompt, input_ids, input_embeds, self._num_images(prompt_parts)
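Aside: a self-contained sketch (assumed toy data, not from the extension) of the image-tag splitting that `_split_prompt` performs above; the regex isolates inline base64 JPEG tags from the surrounding text.

```python
import base64
import re

# Same pattern as in _split_prompt above.
IMG_RE = r'<img src="data:image/jpeg;base64,([A-Za-z0-9+/=]+)">'
payload = base64.b64encode(b'not a real jpeg').decode()  # hypothetical image data
prompt = f'describe this: <img src="data:image/jpeg;base64,{payload}"> please'

match = re.search(IMG_RE, prompt)
print(prompt[:match.start()])            # text before the image tag
print(base64.b64decode(match.group(1)))  # b'not a real jpeg'
print(prompt[match.end():])              # text after the image tag
```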
spaces/Anonymous-sub/Rerender/gmflow_module/gmflow/__init__.py
DELETED
File without changes
spaces/Arnx/MusicGenXvAKN/audiocraft/quantization/__init__.py
DELETED
@@ -1,9 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the license found in the
-# LICENSE file in the root directory of this source tree.
-
-# flake8: noqa
-from .vq import ResidualVectorQuantizer
-from .base import BaseQuantizer, DummyQuantizer, QuantizedResult
spaces/Arnx/MusicGenXvAKN/tests/models/test_musicgen.py
DELETED
@@ -1,58 +0,0 @@
-# Copyright (c) Meta Platforms, Inc. and affiliates.
-# All rights reserved.
-#
-# This source code is licensed under the license found in the
-# LICENSE file in the root directory of this source tree.
-
-import pytest
-import torch
-
-from audiocraft.models import MusicGen
-
-
-class TestSEANetModel:
-    def get_musicgen(self):
-        mg = MusicGen.get_pretrained(name='debug', device='cpu')
-        mg.set_generation_params(duration=2.0, extend_stride=2.)
-        return mg
-
-    def test_base(self):
-        mg = self.get_musicgen()
-        assert mg.frame_rate == 25
-        assert mg.sample_rate == 32000
-        assert mg.audio_channels == 1
-
-    def test_generate_unconditional(self):
-        mg = self.get_musicgen()
-        wav = mg.generate_unconditional(3)
-        assert list(wav.shape) == [3, 1, 64000]
-
-    def test_generate_continuation(self):
-        mg = self.get_musicgen()
-        prompt = torch.randn(3, 1, 32000)
-        wav = mg.generate_continuation(prompt, 32000)
-        assert list(wav.shape) == [3, 1, 64000]
-
-        prompt = torch.randn(2, 1, 32000)
-        wav = mg.generate_continuation(
-            prompt, 32000, ['youpi', 'lapin dort'])
-        assert list(wav.shape) == [2, 1, 64000]
-
-        prompt = torch.randn(2, 1, 32000)
-        with pytest.raises(AssertionError):
-            wav = mg.generate_continuation(
-                prompt, 32000, ['youpi', 'lapin dort', 'one too many'])
-
-    def test_generate(self):
-        mg = self.get_musicgen()
-        wav = mg.generate(
-            ['youpi', 'lapin dort'])
-        assert list(wav.shape) == [2, 1, 64000]
-
-    def test_generate_long(self):
-        mg = self.get_musicgen()
-        mg.max_duration = 3.
-        mg.set_generation_params(duration=4., extend_stride=2.)
-        wav = mg.generate(
-            ['youpi', 'lapin dort'])
-        assert list(wav.shape) == [2, 1, 32000 * 4]
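Aside: the shape assertions in these tests follow from simple sample-rate arithmetic, which the snippet below spells out.

```python
# duration (s) x sample_rate (Hz) = samples per channel in the output tensor.
sample_rate = 32000
for duration in (2.0, 4.0):
    print(int(duration * sample_rate))  # 64000, then 128000
```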
spaces/Artrajz/vits-simple-api/bert_vits2/text/bert_handler.py
DELETED
@@ -1,33 +0,0 @@
-import importlib
-
-
-class BertHandler:
-    _bert_functions = {}
-
-    BERT_IMPORT_MAP = {
-        "zh": "bert_vits2.text.chinese_bert.get_bert_feature",
-        "en": "bert_vits2.text.english_bert_mock.get_bert_feature",
-        "ja": "bert_vits2.text.japanese_bert.get_bert_feature",
-    }
-
-    def __init__(self, languages):
-        for lang in languages:
-            if lang not in BertHandler._bert_functions:
-                self.load_bert_function(lang)
-
-    def load_bert_function(self, language):
-        if language not in BertHandler.BERT_IMPORT_MAP:
-            raise ValueError(f"Unsupported language: {language}")
-
-        module_path, function_name = BertHandler.BERT_IMPORT_MAP[language].rsplit('.', 1)
-        module = importlib.import_module(module_path, package=__package__)
-        bert_function = getattr(module, function_name)
-
-        BertHandler._bert_functions[language] = bert_function
-
-    def get_bert(self, norm_text, word2ph, language):
-        if language not in BertHandler._bert_functions:
-            raise ValueError(f"BERT for {language} has not been initialized. Please initialize first.")
-
-        bert_func = BertHandler._bert_functions[language]
-        return bert_func(norm_text, word2ph)
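Aside: a standalone sketch of the dotted-path dispatch pattern `BertHandler` uses, exercised against stdlib targets so it runs anywhere; the map entries here are illustrative, not from the repo.

```python
import importlib

FUNC_MAP = {"sqrt": "math.sqrt", "dumps": "json.dumps"}

def resolve(name):
    # Split "module.path.function", import the module lazily, fetch the attr.
    module_path, func_name = FUNC_MAP[name].rsplit('.', 1)
    module = importlib.import_module(module_path)
    return getattr(module, func_name)

print(resolve("sqrt")(16))         # 4.0
print(resolve("dumps")({"a": 1}))  # {"a": 1}
```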
spaces/Atualli/yoloxTeste/configs/yolox_m.py
DELETED
@@ -1,15 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding:utf-8 -*-
-# Copyright (c) Megvii, Inc. and its affiliates.
-
-import os
-
-from yolox.exp import Exp as MyExp
-
-
-class Exp(MyExp):
-    def __init__(self):
-        super(Exp, self).__init__()
-        self.depth = 0.67
-        self.width = 0.75
-        self.exp_name = os.path.split(os.path.realpath(__file__))[1].split(".")[0]
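Aside: the two multipliers are what distinguish YOLOX-M from the other variants. A rough sketch of their effect (an interpretation of the YOLOX scaling scheme, not code from the repo): width scales channel counts and depth scales block repeats.

```python
# With the YOLOX-M values set above:
width, depth = 0.75, 0.67
print(int(256 * width))          # 192 channels where the base model has 256
print(max(round(3 * depth), 1))  # 2 repeats where the base model has 3
```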
spaces/Ayushnangia/Whispercpp_yt/README.md
DELETED
@@ -1,13 +0,0 @@
----
-title: Whispercpp Yt
-emoji: 🐠
-colorFrom: pink
-colorTo: yellow
-sdk: gradio
-sdk_version: 3.40.1
-app_file: app.py
-pinned: false
-license: apache-2.0
----
-
-Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
spaces/Banbri/zcvzcv/src/components/ui/separator.tsx
DELETED
@@ -1,31 +0,0 @@
-"use client"
-
-import * as React from "react"
-import * as SeparatorPrimitive from "@radix-ui/react-separator"
-
-import { cn } from "@/lib/utils"
-
-const Separator = React.forwardRef<
-  React.ElementRef<typeof SeparatorPrimitive.Root>,
-  React.ComponentPropsWithoutRef<typeof SeparatorPrimitive.Root>
->(
-  (
-    { className, orientation = "horizontal", decorative = true, ...props },
-    ref
-  ) => (
-    <SeparatorPrimitive.Root
-      ref={ref}
-      decorative={decorative}
-      orientation={orientation}
-      className={cn(
-        "shrink-0 bg-stone-200 dark:bg-stone-800",
-        orientation === "horizontal" ? "h-[1px] w-full" : "h-full w-[1px]",
-        className
-      )}
-      {...props}
-    />
-  )
-)
-Separator.displayName = SeparatorPrimitive.Root.displayName
-
-export { Separator }
spaces/Bart92/RVC_HF/lib/uvr5_pack/lib_v5/nets_537227KB.py
DELETED
@@ -1,123 +0,0 @@
-import torch
-import numpy as np
-from torch import nn
-import torch.nn.functional as F
-
-from . import layers_537238KB as layers
-
-
-class BaseASPPNet(nn.Module):
-    def __init__(self, nin, ch, dilations=(4, 8, 16)):
-        super(BaseASPPNet, self).__init__()
-        self.enc1 = layers.Encoder(nin, ch, 3, 2, 1)
-        self.enc2 = layers.Encoder(ch, ch * 2, 3, 2, 1)
-        self.enc3 = layers.Encoder(ch * 2, ch * 4, 3, 2, 1)
-        self.enc4 = layers.Encoder(ch * 4, ch * 8, 3, 2, 1)
-
-        self.aspp = layers.ASPPModule(ch * 8, ch * 16, dilations)
-
-        self.dec4 = layers.Decoder(ch * (8 + 16), ch * 8, 3, 1, 1)
-        self.dec3 = layers.Decoder(ch * (4 + 8), ch * 4, 3, 1, 1)
-        self.dec2 = layers.Decoder(ch * (2 + 4), ch * 2, 3, 1, 1)
-        self.dec1 = layers.Decoder(ch * (1 + 2), ch, 3, 1, 1)
-
-    def __call__(self, x):
-        h, e1 = self.enc1(x)
-        h, e2 = self.enc2(h)
-        h, e3 = self.enc3(h)
-        h, e4 = self.enc4(h)
-
-        h = self.aspp(h)
-
-        h = self.dec4(h, e4)
-        h = self.dec3(h, e3)
-        h = self.dec2(h, e2)
-        h = self.dec1(h, e1)
-
-        return h
-
-
-class CascadedASPPNet(nn.Module):
-    def __init__(self, n_fft):
-        super(CascadedASPPNet, self).__init__()
-        self.stg1_low_band_net = BaseASPPNet(2, 64)
-        self.stg1_high_band_net = BaseASPPNet(2, 64)
-
-        self.stg2_bridge = layers.Conv2DBNActiv(66, 32, 1, 1, 0)
-        self.stg2_full_band_net = BaseASPPNet(32, 64)
-
-        self.stg3_bridge = layers.Conv2DBNActiv(130, 64, 1, 1, 0)
-        self.stg3_full_band_net = BaseASPPNet(64, 128)
-
-        self.out = nn.Conv2d(128, 2, 1, bias=False)
-        self.aux1_out = nn.Conv2d(64, 2, 1, bias=False)
-        self.aux2_out = nn.Conv2d(64, 2, 1, bias=False)
-
-        self.max_bin = n_fft // 2
-        self.output_bin = n_fft // 2 + 1
-
-        self.offset = 128
-
-    def forward(self, x, aggressiveness=None):
-        mix = x.detach()
-        x = x.clone()
-
-        x = x[:, :, : self.max_bin]
-
-        bandw = x.size()[2] // 2
-        aux1 = torch.cat(
-            [
-                self.stg1_low_band_net(x[:, :, :bandw]),
-                self.stg1_high_band_net(x[:, :, bandw:]),
-            ],
-            dim=2,
-        )
-
-        h = torch.cat([x, aux1], dim=1)
-        aux2 = self.stg2_full_band_net(self.stg2_bridge(h))
-
-        h = torch.cat([x, aux1, aux2], dim=1)
-        h = self.stg3_full_band_net(self.stg3_bridge(h))
-
-        mask = torch.sigmoid(self.out(h))
-        mask = F.pad(
-            input=mask,
-            pad=(0, 0, 0, self.output_bin - mask.size()[2]),
-            mode="replicate",
-        )
-
-        if self.training:
-            aux1 = torch.sigmoid(self.aux1_out(aux1))
-            aux1 = F.pad(
-                input=aux1,
-                pad=(0, 0, 0, self.output_bin - aux1.size()[2]),
-                mode="replicate",
-            )
-            aux2 = torch.sigmoid(self.aux2_out(aux2))
-            aux2 = F.pad(
-                input=aux2,
-                pad=(0, 0, 0, self.output_bin - aux2.size()[2]),
-                mode="replicate",
-            )
-            return mask * mix, aux1 * mix, aux2 * mix
-        else:
-            if aggressiveness:
-                mask[:, :, : aggressiveness["split_bin"]] = torch.pow(
-                    mask[:, :, : aggressiveness["split_bin"]],
-                    1 + aggressiveness["value"] / 3,
-                )
-                mask[:, :, aggressiveness["split_bin"] :] = torch.pow(
-                    mask[:, :, aggressiveness["split_bin"] :],
-                    1 + aggressiveness["value"],
-                )
-
-            return mask * mix
-
-    def predict(self, x_mag, aggressiveness=None):
-        h = self.forward(x_mag, aggressiveness)
-
-        if self.offset > 0:
-            h = h[:, :, :, self.offset : -self.offset]
-            assert h.size()[3] > 0
-
-        return h
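Aside: a minimal, self-contained illustration (assumed shapes, not from the repo) of the frequency-band split `CascadedASPPNet.forward` performs on a spectrogram tensor of shape (batch, channels, bins, frames) before stage 1.

```python
import torch

n_fft = 2048
x = torch.randn(1, 2, n_fft // 2 + 1, 100)  # hypothetical magnitude spectrogram
x = x[:, :, : n_fft // 2]                   # drop the top bin, as max_bin does
bandw = x.size(2) // 2
low, high = x[:, :, :bandw], x[:, :, bandw:]
print(low.shape, high.shape)  # torch.Size([1, 2, 512, 100]) for each band
```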
spaces/BernardoOlisan/vqganclip/CLIP/data/yfcc100m.md
DELETED
@@ -1,14 +0,0 @@
-# The YFCC100M Subset
-
-In the paper, we performed a dataset ablation using a subset of the YFCC100M dataset and showed that the performance remained largely similar.
-
-The subset contains 14,829,396 images, about 15% of the full dataset, which have been filtered to only keep those with natural language titles and/or descriptions in English.
-
-We provide the list of (line number, photo identifier, photo hash) of each image contained in this subset. These correspond to the first three columns in the dataset's metadata TSV file.
-
-```
-wget https://openaipublic.azureedge.net/clip/data/yfcc100m_subset_data.tsv.bz2
-bunzip2 yfcc100m_subset_data.tsv.bz2
-```
-
-Use of the underlying media files is subject to the Creative Commons licenses chosen by their creators/uploaders. For more information about the YFCC100M dataset, visit [the official website](https://multimediacommons.wordpress.com/yfcc100m-core-dataset/).
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_internal/locations/base.py
DELETED
@@ -1,81 +0,0 @@
-import functools
-import os
-import site
-import sys
-import sysconfig
-import typing
-
-from pip._internal.exceptions import InstallationError
-from pip._internal.utils import appdirs
-from pip._internal.utils.virtualenv import running_under_virtualenv
-
-# Application Directories
-USER_CACHE_DIR = appdirs.user_cache_dir("pip")
-
-# FIXME doesn't account for venv linked to global site-packages
-site_packages: str = sysconfig.get_path("purelib")
-
-
-def get_major_minor_version() -> str:
-    """
-    Return the major-minor version of the current Python as a string, e.g.
-    "3.7" or "3.10".
-    """
-    return "{}.{}".format(*sys.version_info)
-
-
-def change_root(new_root: str, pathname: str) -> str:
-    """Return 'pathname' with 'new_root' prepended.
-
-    If 'pathname' is relative, this is equivalent to os.path.join(new_root, pathname).
-    Otherwise, it requires making 'pathname' relative and then joining the
-    two, which is tricky on DOS/Windows and Mac OS.
-
-    This is borrowed from Python's standard library's distutils module.
-    """
-    if os.name == "posix":
-        if not os.path.isabs(pathname):
-            return os.path.join(new_root, pathname)
-        else:
-            return os.path.join(new_root, pathname[1:])
-
-    elif os.name == "nt":
-        (drive, path) = os.path.splitdrive(pathname)
-        if path[0] == "\\":
-            path = path[1:]
-        return os.path.join(new_root, path)
-
-    else:
-        raise InstallationError(
-            f"Unknown platform: {os.name}\n"
-            "Can not change root path prefix on unknown platform."
-        )
-
-
-def get_src_prefix() -> str:
-    if running_under_virtualenv():
-        src_prefix = os.path.join(sys.prefix, "src")
-    else:
-        # FIXME: keep src in cwd for now (it is not a temporary folder)
-        try:
-            src_prefix = os.path.join(os.getcwd(), "src")
-        except OSError:
-            # In case the current working directory has been renamed or deleted
-            sys.exit("The folder you are executing pip from can no longer be found.")
-
-    # under macOS + virtualenv sys.prefix is not properly resolved
-    # it is something like /path/to/python/bin/..
-    return os.path.abspath(src_prefix)
-
-
-try:
-    # Use getusersitepackages if this is present, as it ensures that the
-    # value is initialised properly.
-    user_site: typing.Optional[str] = site.getusersitepackages()
-except AttributeError:
-    user_site = site.USER_SITE
-
-
-@functools.lru_cache(maxsize=None)
-def is_osx_framework() -> bool:
-    return bool(sysconfig.get_config_var("PYTHONFRAMEWORK"))
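Aside: a quick demonstration (POSIX branch only) of what `change_root` above computes: absolute paths are re-rooted by stripping their leading slash before joining.

```python
import os.path

def change_root_posix(new_root, pathname):
    # Mirrors the os.name == "posix" branch of change_root above.
    if not os.path.isabs(pathname):
        return os.path.join(new_root, pathname)
    return os.path.join(new_root, pathname[1:])

print(change_root_posix('/alt', '/usr/lib/python3'))  # /alt/usr/lib/python3
print(change_root_posix('/alt', 'usr/lib/python3'))   # /alt/usr/lib/python3
```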
spaces/Big-Web/MMSD/env/Lib/site-packages/pip/_vendor/rich/highlighter.py
DELETED
@@ -1,232 +0,0 @@
-import re
-from abc import ABC, abstractmethod
-from typing import List, Union
-
-from .text import Span, Text
-
-
-def _combine_regex(*regexes: str) -> str:
-    """Combine a number of regexes in to a single regex.
-
-    Returns:
-        str: New regex with all regexes ORed together.
-    """
-    return "|".join(regexes)
-
-
-class Highlighter(ABC):
-    """Abstract base class for highlighters."""
-
-    def __call__(self, text: Union[str, Text]) -> Text:
-        """Highlight a str or Text instance.
-
-        Args:
-            text (Union[str, ~Text]): Text to highlight.
-
-        Raises:
-            TypeError: If not called with text or str.
-
-        Returns:
-            Text: A Text instance with highlighting applied.
-        """
-        if isinstance(text, str):
-            highlight_text = Text(text)
-        elif isinstance(text, Text):
-            highlight_text = text.copy()
-        else:
-            raise TypeError(f"str or Text instance required, not {text!r}")
-        self.highlight(highlight_text)
-        return highlight_text
-
-    @abstractmethod
-    def highlight(self, text: Text) -> None:
-        """Apply highlighting in place to text.
-
-        Args:
-            text (~Text): A text object to highlight.
-        """
-
-
-class NullHighlighter(Highlighter):
-    """A highlighter object that doesn't highlight.
-
-    May be used to disable highlighting entirely.
-
-    """
-
-    def highlight(self, text: Text) -> None:
-        """Nothing to do"""
-
-
-class RegexHighlighter(Highlighter):
-    """Applies highlighting from a list of regular expressions."""
-
-    highlights: List[str] = []
-    base_style: str = ""
-
-    def highlight(self, text: Text) -> None:
-        """Highlight :class:`rich.text.Text` using regular expressions.
-
-        Args:
-            text (~Text): Text to highlight.
-
-        """
-
-        highlight_regex = text.highlight_regex
-        for re_highlight in self.highlights:
-            highlight_regex(re_highlight, style_prefix=self.base_style)
-
-
-class ReprHighlighter(RegexHighlighter):
-    """Highlights the text typically produced from ``__repr__`` methods."""
-
-    base_style = "repr."
-    highlights = [
-        r"(?P<tag_start><)(?P<tag_name>[-\w.:|]*)(?P<tag_contents>[\w\W]*)(?P<tag_end>>)",
-        r'(?P<attrib_name>[\w_]{1,50})=(?P<attrib_value>"?[\w_]+"?)?',
-        r"(?P<brace>[][{}()])",
-        _combine_regex(
-            r"(?P<ipv4>[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3})",
-            r"(?P<ipv6>([A-Fa-f0-9]{1,4}::?){1,7}[A-Fa-f0-9]{1,4})",
-            r"(?P<eui64>(?:[0-9A-Fa-f]{1,2}-){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){7}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){3}[0-9A-Fa-f]{4})",
-            r"(?P<eui48>(?:[0-9A-Fa-f]{1,2}-){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{1,2}:){5}[0-9A-Fa-f]{1,2}|(?:[0-9A-Fa-f]{4}\.){2}[0-9A-Fa-f]{4})",
-            r"(?P<uuid>[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12})",
-            r"(?P<call>[\w.]*?)\(",
-            r"\b(?P<bool_true>True)\b|\b(?P<bool_false>False)\b|\b(?P<none>None)\b",
-            r"(?P<ellipsis>\.\.\.)",
-            r"(?P<number_complex>(?<!\w)(?:\-?[0-9]+\.?[0-9]*(?:e[-+]?\d+?)?)(?:[-+](?:[0-9]+\.?[0-9]*(?:e[-+]?\d+)?))?j)",
-            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[-+]?\d+?)?\b|0x[0-9a-fA-F]*)",
-            r"(?P<path>\B(/[-\w._+]+)*\/)(?P<filename>[-\w._+]*)?",
-            r"(?<![\\\w])(?P<str>b?'''.*?(?<!\\)'''|b?'.*?(?<!\\)'|b?\"\"\".*?(?<!\\)\"\"\"|b?\".*?(?<!\\)\")",
-            r"(?P<url>(file|https|http|ws|wss)://[-0-9a-zA-Z$_+!`(),.?/;:&=%#]*)",
-        ),
-    ]
-
-
-class JSONHighlighter(RegexHighlighter):
-    """Highlights JSON"""
-
-    # Captures the start and end of JSON strings, handling escaped quotes
-    JSON_STR = r"(?<![\\\w])(?P<str>b?\".*?(?<!\\)\")"
-    JSON_WHITESPACE = {" ", "\n", "\r", "\t"}
-
-    base_style = "json."
-    highlights = [
-        _combine_regex(
-            r"(?P<brace>[\{\[\(\)\]\}])",
-            r"\b(?P<bool_true>true)\b|\b(?P<bool_false>false)\b|\b(?P<null>null)\b",
-            r"(?P<number>(?<!\w)\-?[0-9]+\.?[0-9]*(e[\-\+]?\d+?)?\b|0x[0-9a-fA-F]*)",
-            JSON_STR,
-        ),
-    ]
-
-    def highlight(self, text: Text) -> None:
-        super().highlight(text)
-
-        # Additional work to handle highlighting JSON keys
-        plain = text.plain
-        append = text.spans.append
-        whitespace = self.JSON_WHITESPACE
-        for match in re.finditer(self.JSON_STR, plain):
-            start, end = match.span()
-            cursor = end
-            while cursor < len(plain):
-                char = plain[cursor]
-                cursor += 1
-                if char == ":":
-                    append(Span(start, end, "json.key"))
-                elif char in whitespace:
-                    continue
-                break
-
-
-class ISO8601Highlighter(RegexHighlighter):
-    """Highlights the ISO8601 date time strings.
-    Regex reference: https://www.oreilly.com/library/view/regular-expressions-cookbook/9781449327453/ch04s07.html
-    """
-
-    base_style = "iso8601."
-    highlights = [
-        #
-        # Dates
-        #
-        # Calendar month (e.g. 2008-08). The hyphen is required
-        r"^(?P<year>[0-9]{4})-(?P<month>1[0-2]|0[1-9])$",
-        # Calendar date w/o hyphens (e.g. 20080830)
-        r"^(?P<date>(?P<year>[0-9]{4})(?P<month>1[0-2]|0[1-9])(?P<day>3[01]|0[1-9]|[12][0-9]))$",
-        # Ordinal date (e.g. 2008-243). The hyphen is optional
-        r"^(?P<date>(?P<year>[0-9]{4})-?(?P<day>36[0-6]|3[0-5][0-9]|[12][0-9]{2}|0[1-9][0-9]|00[1-9]))$",
-        #
-        # Weeks
-        #
-        # Week of the year (e.g., 2008-W35). The hyphen is optional
-        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9]))$",
-        # Week date (e.g., 2008-W35-6). The hyphens are optional
-        r"^(?P<date>(?P<year>[0-9]{4})-?W(?P<week>5[0-3]|[1-4][0-9]|0[1-9])-?(?P<day>[1-7]))$",
-        #
-        # Times
-        #
-        # Hours and minutes (e.g., 17:21). The colon is optional
-        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9]):?(?P<minute>[0-5][0-9]))$",
-        # Hours, minutes, and seconds w/o colons (e.g., 172159)
-        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9])(?P<minute>[0-5][0-9])(?P<second>[0-5][0-9]))$",
-        # Time zone designator (e.g., Z, +07 or +07:00). The colons and the minutes are optional
-        r"^(?P<timezone>(Z|[+-](?:2[0-3]|[01][0-9])(?::?(?:[0-5][0-9]))?))$",
-        # Hours, minutes, and seconds with time zone designator (e.g., 17:21:59+07:00).
-        # All the colons are optional. The minutes in the time zone designator are also optional
-        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9])(?P<minute>[0-5][0-9])(?P<second>[0-5][0-9]))(?P<timezone>Z|[+-](?:2[0-3]|[01][0-9])(?::?(?:[0-5][0-9]))?)$",
-        #
-        # Date and Time
-        #
-        # Calendar date with hours, minutes, and seconds (e.g., 2008-08-30 17:21:59 or 20080830 172159).
-        # A space is required between the date and the time. The hyphens and colons are optional.
-        # This regex matches dates and times that specify some hyphens or colons but omit others.
-        # This does not follow ISO 8601
-        r"^(?P<date>(?P<year>[0-9]{4})(?P<hyphen>-)?(?P<month>1[0-2]|0[1-9])(?(hyphen)-)(?P<day>3[01]|0[1-9]|[12][0-9])) (?P<time>(?P<hour>2[0-3]|[01][0-9])(?(hyphen):)(?P<minute>[0-5][0-9])(?(hyphen):)(?P<second>[0-5][0-9]))$",
-        #
-        # XML Schema dates and times
-        #
-        # Date, with optional time zone (e.g., 2008-08-30 or 2008-08-30+07:00).
-        # Hyphens are required. This is the XML Schema 'date' type
-        r"^(?P<date>(?P<year>-?(?:[1-9][0-9]*)?[0-9]{4})-(?P<month>1[0-2]|0[1-9])-(?P<day>3[01]|0[1-9]|[12][0-9]))(?P<timezone>Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$",
-        # Time, with optional fractional seconds and time zone (e.g., 01:45:36 or 01:45:36.123+07:00).
-        # There is no limit on the number of digits for the fractional seconds. This is the XML Schema 'time' type
-        r"^(?P<time>(?P<hour>2[0-3]|[01][0-9]):(?P<minute>[0-5][0-9]):(?P<second>[0-5][0-9])(?P<frac>\.[0-9]+)?)(?P<timezone>Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$",
-        # Date and time, with optional fractional seconds and time zone (e.g., 2008-08-30T01:45:36 or 2008-08-30T01:45:36.123Z).
-        # This is the XML Schema 'dateTime' type
-        r"^(?P<date>(?P<year>-?(?:[1-9][0-9]*)?[0-9]{4})-(?P<month>1[0-2]|0[1-9])-(?P<day>3[01]|0[1-9]|[12][0-9]))T(?P<time>(?P<hour>2[0-3]|[01][0-9]):(?P<minute>[0-5][0-9]):(?P<second>[0-5][0-9])(?P<ms>\.[0-9]+)?)(?P<timezone>Z|[+-](?:2[0-3]|[01][0-9]):[0-5][0-9])?$",
-    ]
-
-
-if __name__ == "__main__":  # pragma: no cover
-    from .console import Console
-
-    console = Console()
-    console.print("[bold green]hello world![/bold green]")
-    console.print("'[bold green]hello world![/bold green]'")
-
-    console.print(" /foo")
-    console.print("/foo/")
-    console.print("/foo/bar")
-    console.print("foo/bar/baz")
-
-    console.print("/foo/bar/baz?foo=bar+egg&egg=baz")
-    console.print("/foo/bar/baz/")
-    console.print("/foo/bar/baz/egg")
-    console.print("/foo/bar/baz/egg.py")
-    console.print("/foo/bar/baz/egg.py word")
-    console.print(" /foo/bar/baz/egg.py word")
-    console.print("foo /foo/bar/baz/egg.py word")
-    console.print("foo /foo/bar/ba._++z/egg+.py word")
-    console.print("https://example.org?foo=bar#header")
-
-    console.print(1234567.34)
-    console.print(1 / 2)
-    console.print(-1 / 123123123123)
-
-    console.print(
-        "127.0.1.1 bar 192.168.1.4 2001:0db8:85a3:0000:0000:8a2e:0370:7334 foo"
-    )
-    import json
-
-    console.print_json(json.dumps(obj={"name": "apple", "count": 1}), indent=None)
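Aside: beyond the `__main__` demo in the file itself, a highlighter instance can be called directly to produce a styled `Text` object, as the `Highlighter.__call__` definition above shows.

```python
# Apply ReprHighlighter by hand; the input string is a made-up repr.
from rich.console import Console
from rich.highlighter import ReprHighlighter

console = Console()
text = ReprHighlighter()("Point(x=1.5, y=None, url='https://example.org')")
console.print(text)
```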
spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/command/install.py
DELETED
@@ -1,814 +0,0 @@
-"""distutils.command.install
-
-Implements the Distutils 'install' command."""
-
-import sys
-import os
-import contextlib
-import sysconfig
-import itertools
-
-from distutils import log
-from distutils.core import Command
-from distutils.debug import DEBUG
-from distutils.sysconfig import get_config_vars
-from distutils.file_util import write_file
-from distutils.util import convert_path, subst_vars, change_root
-from distutils.util import get_platform
-from distutils.errors import DistutilsOptionError, DistutilsPlatformError
-from . import _framework_compat as fw
-from .. import _collections
-
-from site import USER_BASE
-from site import USER_SITE
-
-HAS_USER_SITE = True
-
-WINDOWS_SCHEME = {
-    'purelib': '{base}/Lib/site-packages',
-    'platlib': '{base}/Lib/site-packages',
-    'headers': '{base}/Include/{dist_name}',
-    'scripts': '{base}/Scripts',
-    'data': '{base}',
-}
-
-INSTALL_SCHEMES = {
-    'posix_prefix': {
-        'purelib': '{base}/lib/{implementation_lower}{py_version_short}/site-packages',
-        'platlib': '{platbase}/{platlibdir}/{implementation_lower}'
-        '{py_version_short}/site-packages',
-        'headers': '{base}/include/{implementation_lower}'
-        '{py_version_short}{abiflags}/{dist_name}',
-        'scripts': '{base}/bin',
-        'data': '{base}',
-    },
-    'posix_home': {
-        'purelib': '{base}/lib/{implementation_lower}',
-        'platlib': '{base}/{platlibdir}/{implementation_lower}',
-        'headers': '{base}/include/{implementation_lower}/{dist_name}',
-        'scripts': '{base}/bin',
-        'data': '{base}',
-    },
-    'nt': WINDOWS_SCHEME,
-    'pypy': {
-        'purelib': '{base}/site-packages',
-        'platlib': '{base}/site-packages',
-        'headers': '{base}/include/{dist_name}',
-        'scripts': '{base}/bin',
-        'data': '{base}',
-    },
-    'pypy_nt': {
-        'purelib': '{base}/site-packages',
-        'platlib': '{base}/site-packages',
-        'headers': '{base}/include/{dist_name}',
-        'scripts': '{base}/Scripts',
-        'data': '{base}',
-    },
-}
-
-# user site schemes
-if HAS_USER_SITE:
-    INSTALL_SCHEMES['nt_user'] = {
-        'purelib': '{usersite}',
-        'platlib': '{usersite}',
-        'headers': '{userbase}/{implementation}{py_version_nodot_plat}'
-        '/Include/{dist_name}',
-        'scripts': '{userbase}/{implementation}{py_version_nodot_plat}/Scripts',
-        'data': '{userbase}',
-    }
-
-    INSTALL_SCHEMES['posix_user'] = {
-        'purelib': '{usersite}',
-        'platlib': '{usersite}',
-        'headers': '{userbase}/include/{implementation_lower}'
-        '{py_version_short}{abiflags}/{dist_name}',
-        'scripts': '{userbase}/bin',
-        'data': '{userbase}',
-    }
-
-
-INSTALL_SCHEMES.update(fw.schemes)
-
-
-# The keys to an installation scheme; if any new types of files are to be
-# installed, be sure to add an entry to every installation scheme above,
-# and to SCHEME_KEYS here.
-SCHEME_KEYS = ('purelib', 'platlib', 'headers', 'scripts', 'data')
-
-
-def _load_sysconfig_schemes():
-    with contextlib.suppress(AttributeError):
-        return {
-            scheme: sysconfig.get_paths(scheme, expand=False)
-            for scheme in sysconfig.get_scheme_names()
-        }
-
-
-def _load_schemes():
-    """
-    Extend default schemes with schemes from sysconfig.
-    """
-
-    sysconfig_schemes = _load_sysconfig_schemes() or {}
-
-    return {
-        scheme: {
-            **INSTALL_SCHEMES.get(scheme, {}),
-            **sysconfig_schemes.get(scheme, {}),
-        }
-        for scheme in set(itertools.chain(INSTALL_SCHEMES, sysconfig_schemes))
-    }
-
-
-def _get_implementation():
-    if hasattr(sys, 'pypy_version_info'):
-        return 'PyPy'
-    else:
-        return 'Python'
-
-
-def _select_scheme(ob, name):
-    scheme = _inject_headers(name, _load_scheme(_resolve_scheme(name)))
-    vars(ob).update(_remove_set(ob, _scheme_attrs(scheme)))
-
-
-def _remove_set(ob, attrs):
-    """
-    Include only attrs that are None in ob.
-    """
-    return {key: value for key, value in attrs.items() if getattr(ob, key) is None}
-
-
-def _resolve_scheme(name):
-    os_name, sep, key = name.partition('_')
-    try:
-        resolved = sysconfig.get_preferred_scheme(key)
-    except Exception:
-        resolved = fw.scheme(_pypy_hack(name))
-    return resolved
-
-
-def _load_scheme(name):
-    return _load_schemes()[name]
-
-
-def _inject_headers(name, scheme):
-    """
-    Given a scheme name and the resolved scheme,
-    if the scheme does not include headers, resolve
-    the fallback scheme for the name and use headers
-    from it. pypa/distutils#88
-    """
-    # Bypass the preferred scheme, which may not
-    # have defined headers.
-    fallback = _load_scheme(_pypy_hack(name))
-    scheme.setdefault('headers', fallback['headers'])
-    return scheme
-
-
-def _scheme_attrs(scheme):
-    """Resolve install directories by applying the install schemes."""
-    return {f'install_{key}': scheme[key] for key in SCHEME_KEYS}
-
-
-def _pypy_hack(name):
-    PY37 = sys.version_info < (3, 8)
-    old_pypy = hasattr(sys, 'pypy_version_info') and PY37
-    prefix = not name.endswith(('_user', '_home'))
-    pypy_name = 'pypy' + '_nt' * (os.name == 'nt')
-    return pypy_name if old_pypy and prefix else name
-
-
-class install(Command):
-
-    description = "install everything from build directory"
-
-    user_options = [
-        # Select installation scheme and set base director(y|ies)
-        ('prefix=', None, "installation prefix"),
-        ('exec-prefix=', None, "(Unix only) prefix for platform-specific files"),
-        ('home=', None, "(Unix only) home directory to install under"),
-        # Or, just set the base director(y|ies)
-        (
-            'install-base=',
-            None,
-            "base installation directory (instead of --prefix or --home)",
-        ),
-        (
-            'install-platbase=',
-            None,
-            "base installation directory for platform-specific files "
-            + "(instead of --exec-prefix or --home)",
-        ),
-        ('root=', None, "install everything relative to this alternate root directory"),
-        # Or, explicitly set the installation scheme
-        (
-            'install-purelib=',
-            None,
-            "installation directory for pure Python module distributions",
-        ),
-        (
-            'install-platlib=',
-            None,
-            "installation directory for non-pure module distributions",
-        ),
-        (
-            'install-lib=',
-            None,
-            "installation directory for all module distributions "
-            + "(overrides --install-purelib and --install-platlib)",
-        ),
-        ('install-headers=', None, "installation directory for C/C++ headers"),
-        ('install-scripts=', None, "installation directory for Python scripts"),
-        ('install-data=', None, "installation directory for data files"),
-        # Byte-compilation options -- see install_lib.py for details, as
-        # these are duplicated from there (but only install_lib does
-        # anything with them).
-        ('compile', 'c', "compile .py to .pyc [default]"),
-        ('no-compile', None, "don't compile .py files"),
-        (
-            'optimize=',
-            'O',
-            "also compile with optimization: -O1 for \"python -O\", "
-            "-O2 for \"python -OO\", and -O0 to disable [default: -O0]",
-        ),
-        # Miscellaneous control options
-        ('force', 'f', "force installation (overwrite any existing files)"),
-        ('skip-build', None, "skip rebuilding everything (for testing/debugging)"),
-        # Where to install documentation (eventually!)
-        # ('doc-format=', None, "format of documentation to generate"),
-        # ('install-man=', None, "directory for Unix man pages"),
-        # ('install-html=', None, "directory for HTML documentation"),
-        # ('install-info=', None, "directory for GNU info files"),
-        ('record=', None, "filename in which to record list of installed files"),
-    ]
-
-    boolean_options = ['compile', 'force', 'skip-build']
-
-    if HAS_USER_SITE:
-        user_options.append(
-            ('user', None, "install in user site-package '%s'" % USER_SITE)
-        )
-        boolean_options.append('user')
-
-    negative_opt = {'no-compile': 'compile'}
-
-    def initialize_options(self):
-        """Initializes options."""
-        # High-level options: these select both an installation base
-        # and scheme.
-        self.prefix = None
-        self.exec_prefix = None
-        self.home = None
-        self.user = 0
-
-        # These select only the installation base; it's up to the user to
-        # specify the installation scheme (currently, that means supplying
-        # the --install-{platlib,purelib,scripts,data} options).
-        self.install_base = None
-        self.install_platbase = None
-        self.root = None
-
-        # These options are the actual installation directories; if not
-        # supplied by the user, they are filled in using the installation
-        # scheme implied by prefix/exec-prefix/home and the contents of
-        # that installation scheme.
-        self.install_purelib = None  # for pure module distributions
-        self.install_platlib = None  # non-pure (dists w/ extensions)
-        self.install_headers = None  # for C/C++ headers
-        self.install_lib = None  # set to either purelib or platlib
-        self.install_scripts = None
-        self.install_data = None
-        self.install_userbase = USER_BASE
-        self.install_usersite = USER_SITE
-
-        self.compile = None
-        self.optimize = None
-
-        # Deprecated
-        # These two are for putting non-packagized distributions into their
-        # own directory and creating a .pth file if it makes sense.
-        # 'extra_path' comes from the setup file; 'install_path_file' can
-        # be turned off if it makes no sense to install a .pth file. (But
-        # better to install it uselessly than to guess wrong and not
-        # install it when it's necessary and would be used!) Currently,
-        # 'install_path_file' is always true unless some outsider meddles
-        # with it.
-        self.extra_path = None
-        self.install_path_file = 1
-
-        # 'force' forces installation, even if target files are not
-        # out-of-date. 'skip_build' skips running the "build" command,
-        # handy if you know it's not necessary. 'warn_dir' (which is *not*
-        # a user option, it's just there so the bdist_* commands can turn
-        # it off) determines whether we warn about installing to a
-        # directory not in sys.path.
-        self.force = 0
-        self.skip_build = 0
-        self.warn_dir = 1
-
-        # These are only here as a conduit from the 'build' command to the
-        # 'install_*' commands that do the real work. ('build_base' isn't
-        # actually used anywhere, but it might be useful in future.) They
-        # are not user options, because if the user told the install
-        # command where the build directory is, that wouldn't affect the
-        # build command.
-        self.build_base = None
-        self.build_lib = None
-
-        # Not defined yet because we don't know anything about
-        # documentation yet.
-        # self.install_man = None
-        # self.install_html = None
-        # self.install_info = None
-
-        self.record = None
-
-    # -- Option finalizing methods -------------------------------------
-    # (This is rather more involved than for most commands,
-    # because this is where the policy for installing third-
-    # party Python modules on various platforms given a wide
-    # array of user input is decided. Yes, it's quite complex!)
-
-    def finalize_options(self):  # noqa: C901
-        """Finalizes options."""
-        # This method (and its helpers, like 'finalize_unix()',
-        # 'finalize_other()', and 'select_scheme()') is where the default
-        # installation directories for modules, extension modules, and
-        # anything else we care to install from a Python module
-        # distribution. Thus, this code makes a pretty important policy
-        # statement about how third-party stuff is added to a Python
-        # installation! Note that the actual work of installation is done
-        # by the relatively simple 'install_*' commands; they just take
-        # their orders from the installation directory options determined
-        # here.
-
-        # Check for errors/inconsistencies in the options; first, stuff
-        # that's wrong on any platform.
-
-        if (self.prefix or self.exec_prefix or self.home) and (
-            self.install_base or self.install_platbase
-        ):
-            raise DistutilsOptionError(
-                "must supply either prefix/exec-prefix/home or "
-                + "install-base/install-platbase -- not both"
-            )
-
-        if self.home and (self.prefix or self.exec_prefix):
-            raise DistutilsOptionError(
-                "must supply either home or prefix/exec-prefix -- not both"
-            )
-
-        if self.user and (
-            self.prefix
-            or self.exec_prefix
-            or self.home
-            or self.install_base
-            or self.install_platbase
-        ):
-            raise DistutilsOptionError(
-                "can't combine user with prefix, "
-                "exec_prefix/home, or install_(plat)base"
-            )
-
-        # Next, stuff that's wrong (or dubious) only on certain platforms.
-        if os.name != "posix":
-            if self.exec_prefix:
-                self.warn("exec-prefix option ignored on this platform")
-                self.exec_prefix = None
-
-        # Now the interesting logic -- so interesting that we farm it out
-        # to other methods. The goal of these methods is to set the final
-        # values for the install_{lib,scripts,data,...} options, using as
-        # input a heady brew of prefix, exec_prefix, home, install_base,
-        # install_platbase, user-supplied versions of
-        # install_{purelib,platlib,lib,scripts,data,...}, and the
-        # install schemes. Phew!
-
-        self.dump_dirs("pre-finalize_{unix,other}")
-
-        if os.name == 'posix':
-            self.finalize_unix()
-        else:
-            self.finalize_other()
-
-        self.dump_dirs("post-finalize_{unix,other}()")
-
-        # Expand configuration variables, tilde, etc. in self.install_base
-        # and self.install_platbase -- that way, we can use $base or
|
399 |
-
# $platbase in the other installation directories and not worry
|
400 |
-
# about needing recursive variable expansion (shudder).
|
401 |
-
|
402 |
-
py_version = sys.version.split()[0]
|
403 |
-
(prefix, exec_prefix) = get_config_vars('prefix', 'exec_prefix')
|
404 |
-
try:
|
405 |
-
abiflags = sys.abiflags
|
406 |
-
except AttributeError:
|
407 |
-
# sys.abiflags may not be defined on all platforms.
|
408 |
-
abiflags = ''
|
409 |
-
local_vars = {
|
410 |
-
'dist_name': self.distribution.get_name(),
|
411 |
-
'dist_version': self.distribution.get_version(),
|
412 |
-
'dist_fullname': self.distribution.get_fullname(),
|
413 |
-
'py_version': py_version,
|
414 |
-
'py_version_short': '%d.%d' % sys.version_info[:2],
|
415 |
-
'py_version_nodot': '%d%d' % sys.version_info[:2],
|
416 |
-
'sys_prefix': prefix,
|
417 |
-
'prefix': prefix,
|
418 |
-
'sys_exec_prefix': exec_prefix,
|
419 |
-
'exec_prefix': exec_prefix,
|
420 |
-
'abiflags': abiflags,
|
421 |
-
'platlibdir': getattr(sys, 'platlibdir', 'lib'),
|
422 |
-
'implementation_lower': _get_implementation().lower(),
|
423 |
-
'implementation': _get_implementation(),
|
424 |
-
}
|
425 |
-
|
426 |
-
# vars for compatibility on older Pythons
|
427 |
-
compat_vars = dict(
|
428 |
-
# Python 3.9 and earlier
|
429 |
-
py_version_nodot_plat=getattr(sys, 'winver', '').replace('.', ''),
|
430 |
-
)
|
431 |
-
|
432 |
-
if HAS_USER_SITE:
|
433 |
-
local_vars['userbase'] = self.install_userbase
|
434 |
-
local_vars['usersite'] = self.install_usersite
|
435 |
-
|
436 |
-
self.config_vars = _collections.DictStack(
|
437 |
-
[fw.vars(), compat_vars, sysconfig.get_config_vars(), local_vars]
|
438 |
-
)
|
439 |
-
|
440 |
-
self.expand_basedirs()
|
441 |
-
|
442 |
-
self.dump_dirs("post-expand_basedirs()")
|
443 |
-
|
444 |
-
# Now define config vars for the base directories so we can expand
|
445 |
-
# everything else.
|
446 |
-
local_vars['base'] = self.install_base
|
447 |
-
local_vars['platbase'] = self.install_platbase
|
448 |
-
|
449 |
-
if DEBUG:
|
450 |
-
from pprint import pprint
|
451 |
-
|
452 |
-
print("config vars:")
|
453 |
-
pprint(dict(self.config_vars))
|
454 |
-
|
455 |
-
# Expand "~" and configuration variables in the installation
|
456 |
-
# directories.
|
457 |
-
self.expand_dirs()
|
458 |
-
|
459 |
-
self.dump_dirs("post-expand_dirs()")
|
460 |
-
|
461 |
-
# Create directories in the home dir:
|
462 |
-
if self.user:
|
463 |
-
self.create_home_path()
|
464 |
-
|
465 |
-
# Pick the actual directory to install all modules to: either
|
466 |
-
# install_purelib or install_platlib, depending on whether this
|
467 |
-
# module distribution is pure or not. Of course, if the user
|
468 |
-
# already specified install_lib, use their selection.
|
469 |
-
if self.install_lib is None:
|
470 |
-
if self.distribution.has_ext_modules(): # has extensions: non-pure
|
471 |
-
self.install_lib = self.install_platlib
|
472 |
-
else:
|
473 |
-
self.install_lib = self.install_purelib
|
474 |
-
|
475 |
-
# Convert directories from Unix /-separated syntax to the local
|
476 |
-
# convention.
|
477 |
-
self.convert_paths(
|
478 |
-
'lib',
|
479 |
-
'purelib',
|
480 |
-
'platlib',
|
481 |
-
'scripts',
|
482 |
-
'data',
|
483 |
-
'headers',
|
484 |
-
'userbase',
|
485 |
-
'usersite',
|
486 |
-
)
|
487 |
-
|
488 |
-
# Deprecated
|
489 |
-
# Well, we're not actually fully completely finalized yet: we still
|
490 |
-
# have to deal with 'extra_path', which is the hack for allowing
|
491 |
-
# non-packagized module distributions (hello, Numerical Python!) to
|
492 |
-
# get their own directories.
|
493 |
-
self.handle_extra_path()
|
494 |
-
self.install_libbase = self.install_lib # needed for .pth file
|
495 |
-
self.install_lib = os.path.join(self.install_lib, self.extra_dirs)
|
496 |
-
|
497 |
-
# If a new root directory was supplied, make all the installation
|
498 |
-
# dirs relative to it.
|
499 |
-
if self.root is not None:
|
500 |
-
self.change_roots(
|
501 |
-
'libbase', 'lib', 'purelib', 'platlib', 'scripts', 'data', 'headers'
|
502 |
-
)
|
503 |
-
|
504 |
-
self.dump_dirs("after prepending root")
|
505 |
-
|
506 |
-
# Find out the build directories, ie. where to install from.
|
507 |
-
self.set_undefined_options(
|
508 |
-
'build', ('build_base', 'build_base'), ('build_lib', 'build_lib')
|
509 |
-
)
|
510 |
-
|
511 |
-
# Punt on doc directories for now -- after all, we're punting on
|
512 |
-
# documentation completely!
|
513 |
-
|
514 |
-
def dump_dirs(self, msg):
|
515 |
-
"""Dumps the list of user options."""
|
516 |
-
if not DEBUG:
|
517 |
-
return
|
518 |
-
from distutils.fancy_getopt import longopt_xlate
|
519 |
-
|
520 |
-
log.debug(msg + ":")
|
521 |
-
for opt in self.user_options:
|
522 |
-
opt_name = opt[0]
|
523 |
-
if opt_name[-1] == "=":
|
524 |
-
opt_name = opt_name[0:-1]
|
525 |
-
if opt_name in self.negative_opt:
|
526 |
-
opt_name = self.negative_opt[opt_name]
|
527 |
-
opt_name = opt_name.translate(longopt_xlate)
|
528 |
-
val = not getattr(self, opt_name)
|
529 |
-
else:
|
530 |
-
opt_name = opt_name.translate(longopt_xlate)
|
531 |
-
val = getattr(self, opt_name)
|
532 |
-
log.debug(" %s: %s", opt_name, val)
|
533 |
-
|
534 |
-
def finalize_unix(self):
|
535 |
-
"""Finalizes options for posix platforms."""
|
536 |
-
if self.install_base is not None or self.install_platbase is not None:
|
537 |
-
incomplete_scheme = (
|
538 |
-
(
|
539 |
-
self.install_lib is None
|
540 |
-
and self.install_purelib is None
|
541 |
-
and self.install_platlib is None
|
542 |
-
)
|
543 |
-
or self.install_headers is None
|
544 |
-
or self.install_scripts is None
|
545 |
-
or self.install_data is None
|
546 |
-
)
|
547 |
-
if incomplete_scheme:
|
548 |
-
raise DistutilsOptionError(
|
549 |
-
"install-base or install-platbase supplied, but "
|
550 |
-
"installation scheme is incomplete"
|
551 |
-
)
|
552 |
-
return
|
553 |
-
|
554 |
-
if self.user:
|
555 |
-
if self.install_userbase is None:
|
556 |
-
raise DistutilsPlatformError("User base directory is not specified")
|
557 |
-
self.install_base = self.install_platbase = self.install_userbase
|
558 |
-
self.select_scheme("posix_user")
|
559 |
-
elif self.home is not None:
|
560 |
-
self.install_base = self.install_platbase = self.home
|
561 |
-
self.select_scheme("posix_home")
|
562 |
-
else:
|
563 |
-
if self.prefix is None:
|
564 |
-
if self.exec_prefix is not None:
|
565 |
-
raise DistutilsOptionError(
|
566 |
-
"must not supply exec-prefix without prefix"
|
567 |
-
)
|
568 |
-
|
569 |
-
# Allow Fedora to add components to the prefix
|
570 |
-
_prefix_addition = getattr(sysconfig, '_prefix_addition', "")
|
571 |
-
|
572 |
-
self.prefix = os.path.normpath(sys.prefix) + _prefix_addition
|
573 |
-
self.exec_prefix = os.path.normpath(sys.exec_prefix) + _prefix_addition
|
574 |
-
|
575 |
-
else:
|
576 |
-
if self.exec_prefix is None:
|
577 |
-
self.exec_prefix = self.prefix
|
578 |
-
|
579 |
-
self.install_base = self.prefix
|
580 |
-
self.install_platbase = self.exec_prefix
|
581 |
-
self.select_scheme("posix_prefix")
|
582 |
-
|
583 |
-
def finalize_other(self):
|
584 |
-
"""Finalizes options for non-posix platforms"""
|
585 |
-
if self.user:
|
586 |
-
if self.install_userbase is None:
|
587 |
-
raise DistutilsPlatformError("User base directory is not specified")
|
588 |
-
self.install_base = self.install_platbase = self.install_userbase
|
589 |
-
self.select_scheme(os.name + "_user")
|
590 |
-
elif self.home is not None:
|
591 |
-
self.install_base = self.install_platbase = self.home
|
592 |
-
self.select_scheme("posix_home")
|
593 |
-
else:
|
594 |
-
if self.prefix is None:
|
595 |
-
self.prefix = os.path.normpath(sys.prefix)
|
596 |
-
|
597 |
-
self.install_base = self.install_platbase = self.prefix
|
598 |
-
try:
|
599 |
-
self.select_scheme(os.name)
|
600 |
-
except KeyError:
|
601 |
-
raise DistutilsPlatformError(
|
602 |
-
"I don't know how to install stuff on '%s'" % os.name
|
603 |
-
)
|
604 |
-
|
605 |
-
def select_scheme(self, name):
|
606 |
-
_select_scheme(self, name)
|
607 |
-
|
608 |
-
def _expand_attrs(self, attrs):
|
609 |
-
for attr in attrs:
|
610 |
-
val = getattr(self, attr)
|
611 |
-
if val is not None:
|
612 |
-
if os.name == 'posix' or os.name == 'nt':
|
613 |
-
val = os.path.expanduser(val)
|
614 |
-
val = subst_vars(val, self.config_vars)
|
615 |
-
setattr(self, attr, val)
|
616 |
-
|
617 |
-
def expand_basedirs(self):
|
618 |
-
"""Calls `os.path.expanduser` on install_base, install_platbase and
|
619 |
-
root."""
|
620 |
-
self._expand_attrs(['install_base', 'install_platbase', 'root'])
|
621 |
-
|
622 |
-
def expand_dirs(self):
|
623 |
-
"""Calls `os.path.expanduser` on install dirs."""
|
624 |
-
self._expand_attrs(
|
625 |
-
[
|
626 |
-
'install_purelib',
|
627 |
-
'install_platlib',
|
628 |
-
'install_lib',
|
629 |
-
'install_headers',
|
630 |
-
'install_scripts',
|
631 |
-
'install_data',
|
632 |
-
]
|
633 |
-
)
|
634 |
-
|
635 |
-
def convert_paths(self, *names):
|
636 |
-
"""Call `convert_path` over `names`."""
|
637 |
-
for name in names:
|
638 |
-
attr = "install_" + name
|
639 |
-
setattr(self, attr, convert_path(getattr(self, attr)))
|
640 |
-
|
641 |
-
def handle_extra_path(self):
|
642 |
-
"""Set `path_file` and `extra_dirs` using `extra_path`."""
|
643 |
-
if self.extra_path is None:
|
644 |
-
self.extra_path = self.distribution.extra_path
|
645 |
-
|
646 |
-
if self.extra_path is not None:
|
647 |
-
log.warn(
|
648 |
-
"Distribution option extra_path is deprecated. "
|
649 |
-
"See issue27919 for details."
|
650 |
-
)
|
651 |
-
if isinstance(self.extra_path, str):
|
652 |
-
self.extra_path = self.extra_path.split(',')
|
653 |
-
|
654 |
-
if len(self.extra_path) == 1:
|
655 |
-
path_file = extra_dirs = self.extra_path[0]
|
656 |
-
elif len(self.extra_path) == 2:
|
657 |
-
path_file, extra_dirs = self.extra_path
|
658 |
-
else:
|
659 |
-
raise DistutilsOptionError(
|
660 |
-
"'extra_path' option must be a list, tuple, or "
|
661 |
-
"comma-separated string with 1 or 2 elements"
|
662 |
-
)
|
663 |
-
|
664 |
-
# convert to local form in case Unix notation used (as it
|
665 |
-
# should be in setup scripts)
|
666 |
-
extra_dirs = convert_path(extra_dirs)
|
667 |
-
else:
|
668 |
-
path_file = None
|
669 |
-
extra_dirs = ''
|
670 |
-
|
671 |
-
# XXX should we warn if path_file and not extra_dirs? (in which
|
672 |
-
# case the path file would be harmless but pointless)
|
673 |
-
self.path_file = path_file
|
674 |
-
self.extra_dirs = extra_dirs
|
675 |
-
|
676 |
-
def change_roots(self, *names):
|
677 |
-
"""Change the install directories pointed by name using root."""
|
678 |
-
for name in names:
|
679 |
-
attr = "install_" + name
|
680 |
-
setattr(self, attr, change_root(self.root, getattr(self, attr)))
|
681 |
-
|
682 |
-
def create_home_path(self):
|
683 |
-
"""Create directories under ~."""
|
684 |
-
if not self.user:
|
685 |
-
return
|
686 |
-
home = convert_path(os.path.expanduser("~"))
|
687 |
-
for name, path in self.config_vars.items():
|
688 |
-
if str(path).startswith(home) and not os.path.isdir(path):
|
689 |
-
self.debug_print("os.makedirs('%s', 0o700)" % path)
|
690 |
-
os.makedirs(path, 0o700)
|
691 |
-
|
692 |
-
# -- Command execution methods -------------------------------------
|
693 |
-
|
694 |
-
def run(self):
|
695 |
-
"""Runs the command."""
|
696 |
-
# Obviously have to build before we can install
|
697 |
-
if not self.skip_build:
|
698 |
-
self.run_command('build')
|
699 |
-
# If we built for any other platform, we can't install.
|
700 |
-
build_plat = self.distribution.get_command_obj('build').plat_name
|
701 |
-
# check warn_dir - it is a clue that the 'install' is happening
|
702 |
-
# internally, and not to sys.path, so we don't check the platform
|
703 |
-
# matches what we are running.
|
704 |
-
if self.warn_dir and build_plat != get_platform():
|
705 |
-
raise DistutilsPlatformError("Can't install when " "cross-compiling")
|
706 |
-
|
707 |
-
# Run all sub-commands (at least those that need to be run)
|
708 |
-
for cmd_name in self.get_sub_commands():
|
709 |
-
self.run_command(cmd_name)
|
710 |
-
|
711 |
-
if self.path_file:
|
712 |
-
self.create_path_file()
|
713 |
-
|
714 |
-
# write list of installed files, if requested.
|
715 |
-
if self.record:
|
716 |
-
outputs = self.get_outputs()
|
717 |
-
if self.root: # strip any package prefix
|
718 |
-
root_len = len(self.root)
|
719 |
-
for counter in range(len(outputs)):
|
720 |
-
outputs[counter] = outputs[counter][root_len:]
|
721 |
-
self.execute(
|
722 |
-
write_file,
|
723 |
-
(self.record, outputs),
|
724 |
-
"writing list of installed files to '%s'" % self.record,
|
725 |
-
)
|
726 |
-
|
727 |
-
sys_path = map(os.path.normpath, sys.path)
|
728 |
-
sys_path = map(os.path.normcase, sys_path)
|
729 |
-
install_lib = os.path.normcase(os.path.normpath(self.install_lib))
|
730 |
-
if (
|
731 |
-
self.warn_dir
|
732 |
-
and not (self.path_file and self.install_path_file)
|
733 |
-
and install_lib not in sys_path
|
734 |
-
):
|
735 |
-
log.debug(
|
736 |
-
(
|
737 |
-
"modules installed to '%s', which is not in "
|
738 |
-
"Python's module search path (sys.path) -- "
|
739 |
-
"you'll have to change the search path yourself"
|
740 |
-
),
|
741 |
-
self.install_lib,
|
742 |
-
)
|
743 |
-
|
744 |
-
def create_path_file(self):
|
745 |
-
"""Creates the .pth file"""
|
746 |
-
filename = os.path.join(self.install_libbase, self.path_file + ".pth")
|
747 |
-
if self.install_path_file:
|
748 |
-
self.execute(
|
749 |
-
write_file, (filename, [self.extra_dirs]), "creating %s" % filename
|
750 |
-
)
|
751 |
-
else:
|
752 |
-
self.warn("path file '%s' not created" % filename)
|
753 |
-
|
754 |
-
# -- Reporting methods ---------------------------------------------
|
755 |
-
|
756 |
-
def get_outputs(self):
|
757 |
-
"""Assembles the outputs of all the sub-commands."""
|
758 |
-
outputs = []
|
759 |
-
for cmd_name in self.get_sub_commands():
|
760 |
-
cmd = self.get_finalized_command(cmd_name)
|
761 |
-
# Add the contents of cmd.get_outputs(), ensuring
|
762 |
-
# that outputs doesn't contain duplicate entries
|
763 |
-
for filename in cmd.get_outputs():
|
764 |
-
if filename not in outputs:
|
765 |
-
outputs.append(filename)
|
766 |
-
|
767 |
-
if self.path_file and self.install_path_file:
|
768 |
-
outputs.append(os.path.join(self.install_libbase, self.path_file + ".pth"))
|
769 |
-
|
770 |
-
return outputs
|
771 |
-
|
772 |
-
def get_inputs(self):
|
773 |
-
"""Returns the inputs of all the sub-commands"""
|
774 |
-
# XXX gee, this looks familiar ;-(
|
775 |
-
inputs = []
|
776 |
-
for cmd_name in self.get_sub_commands():
|
777 |
-
cmd = self.get_finalized_command(cmd_name)
|
778 |
-
inputs.extend(cmd.get_inputs())
|
779 |
-
|
780 |
-
return inputs
|
781 |
-
|
782 |
-
# -- Predicates for sub-command list -------------------------------
|
783 |
-
|
784 |
-
def has_lib(self):
|
785 |
-
"""Returns true if the current distribution has any Python
|
786 |
-
modules to install."""
|
787 |
-
return (
|
788 |
-
self.distribution.has_pure_modules() or self.distribution.has_ext_modules()
|
789 |
-
)
|
790 |
-
|
791 |
-
def has_headers(self):
|
792 |
-
"""Returns true if the current distribution has any headers to
|
793 |
-
install."""
|
794 |
-
return self.distribution.has_headers()
|
795 |
-
|
796 |
-
def has_scripts(self):
|
797 |
-
"""Returns true if the current distribution has any scripts to.
|
798 |
-
install."""
|
799 |
-
return self.distribution.has_scripts()
|
800 |
-
|
801 |
-
def has_data(self):
|
802 |
-
"""Returns true if the current distribution has any data to.
|
803 |
-
install."""
|
804 |
-
return self.distribution.has_data_files()
|
805 |
-
|
806 |
-
# 'sub_commands': a list of commands this command might have to run to
|
807 |
-
# get its work done. See cmd.py for more info.
|
808 |
-
sub_commands = [
|
809 |
-
('install_lib', has_lib),
|
810 |
-
('install_headers', has_headers),
|
811 |
-
('install_scripts', has_scripts),
|
812 |
-
('install_data', has_data),
|
813 |
-
('install_egg_info', lambda self: True),
|
814 |
-
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
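The finalize machinery above can be exercised without installing anything. A minimal sketch, assuming a Python where distutils is importable; the project name 'demo' and the record filename are hypothetical. `ensure_finalized()` runs the `finalize_options()` / `finalize_unix()` path shown above, the same work a real `setup.py install --record ...` invocation triggers:

    # Sketch only: build an 'install' command object and inspect the scheme it picks.
    from distutils.dist import Distribution

    dist = Distribution({'name': 'demo', 'version': '0.1'})  # hypothetical project
    cmd = dist.get_command_obj('install')
    cmd.record = 'installed_files.txt'   # same effect as --record on the command line
    cmd.ensure_finalized()               # runs finalize_options() from the file above
    print(cmd.install_lib)               # where modules would be installed
    print(cmd.install_scripts)           # where scripts would be installed
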
spaces/Big-Web/MMSD/env/Lib/site-packages/setuptools/_distutils/fancy_getopt.py
DELETED
@@ -1,470 +0,0 @@
"""distutils.fancy_getopt

Wrapper around the standard getopt module that provides the following
additional features:
  * short and long options are tied together
  * options have help strings, so fancy_getopt could potentially
    create a complete usage summary
  * options set attributes of a passed-in object
"""

import sys
import string
import re
import getopt
from distutils.errors import DistutilsGetoptError, DistutilsArgError

# Much like command_re in distutils.core, this is close to but not quite
# the same as a Python NAME -- except, in the spirit of most GNU
# utilities, we use '-' in place of '_'.  (The spirit of LISP lives on!)
# The similarities to NAME are again not a coincidence...
longopt_pat = r'[a-zA-Z](?:[a-zA-Z0-9-]*)'
longopt_re = re.compile(r'^%s$' % longopt_pat)

# For recognizing "negative alias" options, eg. "quiet=!verbose"
neg_alias_re = re.compile("^({})=!({})$".format(longopt_pat, longopt_pat))

# This is used to translate long options to legitimate Python identifiers
# (for use as attributes of some object).
longopt_xlate = str.maketrans('-', '_')


class FancyGetopt:
    """Wrapper around the standard 'getopt()' module that provides some
    handy extra functionality:
      * short and long options are tied together
      * options have help strings, and help text can be assembled
        from them
      * options set attributes of a passed-in object
      * boolean options can have "negative aliases" -- eg. if
        --quiet is the "negative alias" of --verbose, then "--quiet"
        on the command line sets 'verbose' to false
    """

    def __init__(self, option_table=None):
        # The option table is (currently) a list of tuples.  The
        # tuples may have 3 or four values:
        #   (long_option, short_option, help_string [, repeatable])
        # if an option takes an argument, its long_option should have '='
        # appended; short_option should just be a single character, no ':'
        # in any case.  If a long_option doesn't have a corresponding
        # short_option, short_option should be None.  All option tuples
        # must have long options.
        self.option_table = option_table

        # 'option_index' maps long option names to entries in the option
        # table (ie. those 3-tuples).
        self.option_index = {}
        if self.option_table:
            self._build_index()

        # 'alias' records (duh) alias options; {'foo': 'bar'} means
        # --foo is an alias for --bar
        self.alias = {}

        # 'negative_alias' keeps track of options that are the boolean
        # opposite of some other option
        self.negative_alias = {}

        # These keep track of the information in the option table.  We
        # don't actually populate these structures until we're ready to
        # parse the command-line, since the 'option_table' passed in here
        # isn't necessarily the final word.
        self.short_opts = []
        self.long_opts = []
        self.short2long = {}
        self.attr_name = {}
        self.takes_arg = {}

        # And 'option_order' is filled up in 'getopt()'; it records the
        # original order of options (and their values) on the command-line,
        # but expands short options, converts aliases, etc.
        self.option_order = []

    def _build_index(self):
        self.option_index.clear()
        for option in self.option_table:
            self.option_index[option[0]] = option

    def set_option_table(self, option_table):
        self.option_table = option_table
        self._build_index()

    def add_option(self, long_option, short_option=None, help_string=None):
        if long_option in self.option_index:
            raise DistutilsGetoptError(
                "option conflict: already an option '%s'" % long_option
            )
        else:
            option = (long_option, short_option, help_string)
            self.option_table.append(option)
            self.option_index[long_option] = option

    def has_option(self, long_option):
        """Return true if the option table for this parser has an
        option with long name 'long_option'."""
        return long_option in self.option_index

    def get_attr_name(self, long_option):
        """Translate long option name 'long_option' to the form it
        has as an attribute of some object: ie., translate hyphens
        to underscores."""
        return long_option.translate(longopt_xlate)

    def _check_alias_dict(self, aliases, what):
        assert isinstance(aliases, dict)
        for (alias, opt) in aliases.items():
            if alias not in self.option_index:
                raise DistutilsGetoptError(
                    ("invalid %s '%s': option '%s' not defined")
                    % (what, alias, alias)
                )
            if opt not in self.option_index:
                raise DistutilsGetoptError(
                    ("invalid %s '%s': aliased option '%s' not defined")
                    % (what, alias, opt)
                )

    def set_aliases(self, alias):
        """Set the aliases for this option parser."""
        self._check_alias_dict(alias, "alias")
        self.alias = alias

    def set_negative_aliases(self, negative_alias):
        """Set the negative aliases for this option parser.
        'negative_alias' should be a dictionary mapping option names to
        option names; both the key and value must already be defined
        in the option table."""
        self._check_alias_dict(negative_alias, "negative alias")
        self.negative_alias = negative_alias

    def _grok_option_table(self):  # noqa: C901
        """Populate the various data structures that keep tabs on the
        option table.  Called by 'getopt()' before it can do anything
        worthwhile.
        """
        self.long_opts = []
        self.short_opts = []
        self.short2long.clear()
        self.repeat = {}

        for option in self.option_table:
            if len(option) == 3:
                long, short, help = option
                repeat = 0
            elif len(option) == 4:
                long, short, help, repeat = option
            else:
                # the option table is part of the code, so simply
                # assert that it is correct
                raise ValueError("invalid option tuple: {!r}".format(option))

            # Type- and value-check the option names
            if not isinstance(long, str) or len(long) < 2:
                raise DistutilsGetoptError(
                    ("invalid long option '%s': must be a string of length >= 2")
                    % long
                )

            if not ((short is None) or (isinstance(short, str) and len(short) == 1)):
                raise DistutilsGetoptError(
                    "invalid short option '%s': "
                    "must be a single character or None" % short
                )

            self.repeat[long] = repeat
            self.long_opts.append(long)

            if long[-1] == '=':  # option takes an argument?
                if short:
                    short = short + ':'
                long = long[0:-1]
                self.takes_arg[long] = 1
            else:
                # Is this option a "negative alias" for some other option (eg.
                # "quiet" == "!verbose")?
                alias_to = self.negative_alias.get(long)
                if alias_to is not None:
                    if self.takes_arg[alias_to]:
                        raise DistutilsGetoptError(
                            "invalid negative alias '%s': "
                            "aliased option '%s' takes a value" % (long, alias_to)
                        )

                    self.long_opts[-1] = long  # XXX redundant?!
                self.takes_arg[long] = 0

            # If this is an alias option, make sure its "takes arg" flag is
            # the same as the option it's aliased to.
            alias_to = self.alias.get(long)
            if alias_to is not None:
                if self.takes_arg[long] != self.takes_arg[alias_to]:
                    raise DistutilsGetoptError(
                        "invalid alias '%s': inconsistent with "
                        "aliased option '%s' (one of them takes a value, "
                        "the other doesn't)" % (long, alias_to)
                    )

            # Now enforce some bondage on the long option name, so we can
            # later translate it to an attribute name on some object.  Have
            # to do this a bit late to make sure we've removed any trailing
            # '='.
            if not longopt_re.match(long):
                raise DistutilsGetoptError(
                    "invalid long option name '%s' "
                    "(must be letters, numbers, hyphens only)" % long
                )

            self.attr_name[long] = self.get_attr_name(long)
            if short:
                self.short_opts.append(short)
                self.short2long[short[0]] = long

    def getopt(self, args=None, object=None):  # noqa: C901
        """Parse command-line options in args. Store as attributes on object.

        If 'args' is None or not supplied, uses 'sys.argv[1:]'.  If
        'object' is None or not supplied, creates a new OptionDummy
        object, stores option values there, and returns a tuple (args,
        object).  If 'object' is supplied, it is modified in place and
        'getopt()' just returns 'args'; in both cases, the returned
        'args' is a modified copy of the passed-in 'args' list, which
        is left untouched.
        """
        if args is None:
            args = sys.argv[1:]
        if object is None:
            object = OptionDummy()
            created_object = True
        else:
            created_object = False

        self._grok_option_table()

        short_opts = ' '.join(self.short_opts)
        try:
            opts, args = getopt.getopt(args, short_opts, self.long_opts)
        except getopt.error as msg:
            raise DistutilsArgError(msg)

        for opt, val in opts:
            if len(opt) == 2 and opt[0] == '-':  # it's a short option
                opt = self.short2long[opt[1]]
            else:
                assert len(opt) > 2 and opt[:2] == '--'
                opt = opt[2:]

            alias = self.alias.get(opt)
            if alias:
                opt = alias

            if not self.takes_arg[opt]:  # boolean option?
                assert val == '', "boolean option can't have value"
                alias = self.negative_alias.get(opt)
                if alias:
                    opt = alias
                    val = 0
                else:
                    val = 1

            attr = self.attr_name[opt]
            # The only repeating option at the moment is 'verbose'.
            # It has a negative option -q quiet, which should set verbose = 0.
            if val and self.repeat.get(attr) is not None:
                val = getattr(object, attr, 0) + 1
            setattr(object, attr, val)
            self.option_order.append((opt, val))

        # for opts
        if created_object:
            return args, object
        else:
            return args

    def get_option_order(self):
        """Returns the list of (option, value) tuples processed by the
        previous run of 'getopt()'.  Raises RuntimeError if
        'getopt()' hasn't been called yet.
        """
        if self.option_order is None:
            raise RuntimeError("'getopt()' hasn't been called yet")
        else:
            return self.option_order

    def generate_help(self, header=None):  # noqa: C901
        """Generate help text (a list of strings, one per suggested line of
        output) from the option table for this FancyGetopt object.
        """
        # Blithely assume the option table is good: probably wouldn't call
        # 'generate_help()' unless you've already called 'getopt()'.

        # First pass: determine maximum length of long option names
        max_opt = 0
        for option in self.option_table:
            long = option[0]
            short = option[1]
            ell = len(long)
            if long[-1] == '=':
                ell = ell - 1
            if short is not None:
                ell = ell + 5  # " (-x)" where short == 'x'
            if ell > max_opt:
                max_opt = ell

        opt_width = max_opt + 2 + 2 + 2  # room for indent + dashes + gutter

        # Typical help block looks like this:
        #   --foo       controls foonabulation
        # Help block for longest option looks like this:
        #   --flimflam  set the flim-flam level
        # and with wrapped text:
        #   --flimflam  set the flim-flam level (must be between
        #               0 and 100, except on Tuesdays)
        # Options with short names will have the short name shown (but
        # it doesn't contribute to max_opt):
        #   --foo (-f)  controls foonabulation
        # If adding the short option would make the left column too wide,
        # we push the explanation off to the next line
        #   --flimflam (-l)
        #               set the flim-flam level
        # Important parameters:
        #   - 2 spaces before option block start lines
        #   - 2 dashes for each long option name
        #   - min. 2 spaces between option and explanation (gutter)
        #   - 5 characters (incl. space) for short option name

        # Now generate lines of help text.  (If 80 columns were good enough
        # for Jesus, then 78 columns are good enough for me!)
        line_width = 78
        text_width = line_width - opt_width
        big_indent = ' ' * opt_width
        if header:
            lines = [header]
        else:
            lines = ['Option summary:']

        for option in self.option_table:
            long, short, help = option[:3]
            text = wrap_text(help, text_width)
            if long[-1] == '=':
                long = long[0:-1]

            # Case 1: no short option at all (makes life easy)
            if short is None:
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, long, text[0]))
                else:
                    lines.append("  --%-*s  " % (max_opt, long))

            # Case 2: we have a short option, so we have to include it
            # just after the long option
            else:
                opt_names = "{} (-{})".format(long, short)
                if text:
                    lines.append("  --%-*s  %s" % (max_opt, opt_names, text[0]))
                else:
                    lines.append("  --%-*s" % opt_names)

            for ell in text[1:]:
                lines.append(big_indent + ell)
        return lines

    def print_help(self, header=None, file=None):
        if file is None:
            file = sys.stdout
        for line in self.generate_help(header):
            file.write(line + "\n")


def fancy_getopt(options, negative_opt, object, args):
    parser = FancyGetopt(options)
    parser.set_negative_aliases(negative_opt)
    return parser.getopt(args, object)


WS_TRANS = {ord(_wschar): ' ' for _wschar in string.whitespace}


def wrap_text(text, width):
    """wrap_text(text : string, width : int) -> [string]

    Split 'text' into multiple lines of no more than 'width' characters
    each, and return the list of strings that results.
    """
    if text is None:
        return []
    if len(text) <= width:
        return [text]

    text = text.expandtabs()
    text = text.translate(WS_TRANS)
    chunks = re.split(r'( +|-+)', text)
    chunks = [ch for ch in chunks if ch]  # ' - ' results in empty strings
    lines = []

    while chunks:
        cur_line = []  # list of chunks (to-be-joined)
        cur_len = 0  # length of current line

        while chunks:
            ell = len(chunks[0])
            if cur_len + ell <= width:  # can squeeze (at least) this chunk in
                cur_line.append(chunks[0])
                del chunks[0]
                cur_len = cur_len + ell
            else:  # this line is full
                # drop last chunk if all space
                if cur_line and cur_line[-1][0] == ' ':
                    del cur_line[-1]
                break

        if chunks:  # any chunks left to process?
            # if the current line is still empty, then we had a single
            # chunk that's too big to fit on a line -- so we break
            # down and break it up at the line width
            if cur_len == 0:
                cur_line.append(chunks[0][0:width])
                chunks[0] = chunks[0][width:]

            # all-whitespace chunks at the end of a line can be discarded
            # (and we know from the re.split above that if a chunk has
            # *any* whitespace, it is *all* whitespace)
            if chunks[0][0] == ' ':
                del chunks[0]

        # and store this line in the list-of-all-lines -- as a single
        # string, of course!
        lines.append(''.join(cur_line))

    return lines


def translate_longopt(opt):
    """Convert a long option name to a valid Python identifier by
    changing "-" to "_".
    """
    return opt.translate(longopt_xlate)


class OptionDummy:
    """Dummy class just used as a place to hold command-line option
    values as instance attributes."""

    def __init__(self, options=[]):
        """Create a new OptionDummy instance.  The attributes listed in
        'options' will be initialized to None."""
        for opt in options:
            setattr(self, opt, None)


if __name__ == "__main__":
    text = """\
Tra-la-la, supercalifragilisticexpialidocious.
How *do* you spell that odd word, anyways?
(Someone ask Mary -- she'll know [or she'll
say, "How should I know?"].)"""

    for w in (10, 20, 30, 40):
        print("width: %d" % w)
        print("\n".join(wrap_text(text, w)))
        print()

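Since the module above is self-contained, its parser can be driven directly. A minimal sketch, assuming a Python where `distutils.fancy_getopt` is still importable; the option table, aliases, and file name below are invented for illustration:

    # Sketch only: an option table in the (long, short, help) format defined above.
    from distutils.fancy_getopt import FancyGetopt

    options = [
        ('verbose', 'v', "run verbosely"),
        ('quiet', 'q', "run quietly"),
        ('output=', 'o', "write results to the given file"),
    ]
    parser = FancyGetopt(options)
    parser.set_negative_aliases({'quiet': 'verbose'})  # --quiet sets verbose to 0

    args, opts = parser.getopt(['-v', '--output', 'out.txt', 'positional'])
    print(args)          # ['positional'] -- leftover positional arguments
    print(opts.verbose)  # 1 (boolean option stored as an attribute)
    print(opts.output)   # 'out.txt'
    parser.print_help("Usage: demo [options]")
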
spaces/BigChia/bird_classifier/app.py
DELETED
@@ -1,26 +0,0 @@
from fastbook import *
from fastai.vision.widgets import *
import gradio as gr

# Load the exported fastai learner and its class vocabulary.
learn = load_learner('model/uk_model.pkl')
labels = learn.dls.vocab

def predict(img):
    """Classify an uploaded image and return a label -> probability mapping."""
    img = PILImage.create(img)
    pred, pred_idx, probs = learn.predict(img)
    return {labels[i]: float(probs[i]) for i in range(len(labels))}

if __name__ == "__main__":
    title = "Bird Breed Classifier"
    description = """A bird breed classifier trained on a dataset of over 200 UK birds with fastai.
    This makes it one of the most comprehensive UK bird classifiers available in the world."""
    interpretation = 'default'
    enable_queue = True

    gr.Interface(fn=predict,
                 inputs=gr.inputs.Image(shape=(512, 512)),
                 outputs=gr.outputs.Label(num_top_classes=3),
                 title=title,
                 description=description,
                 interpretation=interpretation,
                 enable_queue=enable_queue).launch()

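For reference, the same learner can be exercised outside Gradio. A minimal sketch, assuming fastai is installed; the sample image 'robin.jpg' is hypothetical, and the model path is the one the app uses:

    # Sketch only: call the classifier directly instead of through gr.Interface.
    from fastai.vision.all import load_learner, PILImage

    learn = load_learner('model/uk_model.pkl')   # model path from the app above
    img = PILImage.create('robin.jpg')           # hypothetical sample image
    pred, pred_idx, probs = learn.predict(img)
    # Top-3 labels, mirroring num_top_classes=3 in the Gradio interface.
    top3 = sorted(zip(learn.dls.vocab, map(float, probs)),
                  key=lambda t: t[1], reverse=True)[:3]
    print(pred, top3)
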
spaces/BigSalmon/BackTranslation2/app.py
DELETED
@@ -1,117 +0,0 @@
from deep_translator import GoogleTranslator
import streamlit as st

st.set_page_config(page_title='Language Translator (Adaptation of https://github.com/Ompramod9921/Language_translator)')

# Hide Streamlit chrome and replace the footer attribution.
hide_streamlit_style = """
    <style>
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    footer:after {
        content: 'Adaptation of https://github.com/Ompramod9921/Language_translator (om pram)';
        visibility: visible;
    }
    </style>
    """
st.markdown(hide_streamlit_style, unsafe_allow_html=True)

st.markdown("<h1 style='text-align: center; font-size: 24px; color: violet; font-family: Droid Sans'>Language Translator (Adaptation of https://github.com/Ompramod9921/Language_translator)</h1>", unsafe_allow_html=True)
st.write("****")

text = st.text_area("Enter text:", height=None, max_chars=None, key=None, help="Enter your text here -")
st.write("****")

# Both selectboxes offer the same set of languages.
LANGUAGES = ('english', 'hindi', 'afrikaans', 'albanian', 'amharic', 'arabic', 'armenian', 'azerbaijani', 'basque', 'belarusian', 'bengali', 'bosnian', 'bulgarian', 'catalan', 'cebuano', 'chichewa', 'chinese', 'chinese (simplified)', 'chinese (traditional)', 'corsican', 'croatian', 'czech', 'danish', 'dutch', 'esperanto', 'estonian', 'filipino', 'finnish', 'french', 'frisian', 'galician', 'georgian', 'german', 'greek', 'gujarati', 'haitian creole', 'hausa', 'hawaiian', 'hebrew', 'hmong', 'hungarian', 'icelandic', 'igbo', 'indonesian', 'irish', 'italian', 'japanese', 'javanese', 'kannada', 'kazakh', 'khmer', 'korean', 'kurdish (kurmanji)', 'kyrgyz', 'lao', 'latin', 'latvian', 'lithuanian', 'luxembourgish', 'macedonian', 'malagasy', 'malay', 'malayalam', 'maltese', 'maori', 'marathi', 'mongolian', 'myanmar (burmese)', 'nepali', 'norwegian', 'pashto', 'persian', 'polish', 'portuguese', 'punjabi', 'romanian', 'russian', 'samoan', 'scots gaelic', 'serbian', 'sesotho', 'shona', 'sindhi', 'sinhala', 'slovak', 'slovenian', 'somali', 'spanish', 'sundanese', 'swahili', 'swedish', 'tajik', 'tamil', 'telugu', 'thai', 'turkish', 'ukrainian', 'urdu', 'uzbek', 'vietnamese', 'welsh', 'xhosa', 'yiddish', 'yoruba', 'zulu', 'Filipino')

option1 = st.selectbox('Input language', LANGUAGES)
option2 = st.selectbox('Output language', LANGUAGES)
st.write("****")

if st.button('Translate Sentence'):
    st.write(" ")
    st.write(" ")
    if text == "":
        st.warning('Please **enter text** for translation')
    else:
        if option1 == option2:
            st.error("source and target language can't be the same")
        else:
            # Forward translation, then back-translation to the source language.
            translated = GoogleTranslator(source=option1, target=option2).translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source=option2, target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

if st.button('Back Translate: Multiple Languages'):
    st.write(" ")
    st.write(" ")
    if text == "":
        st.warning('Please **enter text** for translation')
    else:
        if option1 == option2:
            st.error("source and target language can't be the same")
        else:
            translated = GoogleTranslator(source=option1, target=option2).translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source=option2, target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            # Round trips through several fixed pivot languages.
            translated = GoogleTranslator(source=option1, target="albanian").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="albanian", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="greek").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="greek", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="italian").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="italian", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="polish").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="polish", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="spanish").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="spanish", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="galician").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="galician", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

            translated = GoogleTranslator(source=option1, target="dutch").translate(text=text)
            st.write("Translated text -")
            st.info(str(translated))
            translated_text = str(translated)
            back_translated = GoogleTranslator(source="dutch", target=option1).translate(text=translated_text)
            st.write("Back Translated text -")
            st.info(str(back_translated))

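The seven per-language blocks above differ only in the pivot language, so the round trip factors naturally into a helper. A minimal sketch (the sample sentence is invented; GoogleTranslator is used exactly as in the app):

    # Sketch only: one helper for the source -> pivot -> source round trip.
    from deep_translator import GoogleTranslator

    def back_translate(text, source, pivot):
        """Translate source -> pivot -> source and return both strings."""
        forward = GoogleTranslator(source=source, target=pivot).translate(text=text)
        backward = GoogleTranslator(source=pivot, target=source).translate(text=forward)
        return forward, backward

    for pivot in ('albanian', 'greek', 'italian', 'polish', 'spanish', 'galician', 'dutch'):
        forward, backward = back_translate("The quick brown fox.", 'english', pivot)
        print(pivot, '->', backward)
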
spaces/Brightmzb/test/README.md
DELETED
@@ -1,13 +0,0 @@
---
title: Test
emoji: 🏢
colorFrom: yellow
colorTo: purple
sdk: gradio
sdk_version: 3.40.1
app_file: app.py
pinned: false
license: openrail
---

Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference

spaces/CVPR/LIVE/thrust/dependencies/cub/test/Makefile
DELETED
@@ -1,468 +0,0 @@
#/******************************************************************************
# * Copyright (c) 2011, Duane Merrill.  All rights reserved.
# * Copyright (c) 2011-2018, NVIDIA CORPORATION.  All rights reserved.
# *
# * Redistribution and use in source and binary forms, with or without
# * modification, are permitted provided that the following conditions are met:
# *     * Redistributions of source code must retain the above copyright
# *       notice, this list of conditions and the following disclaimer.
# *     * Redistributions in binary form must reproduce the above copyright
# *       notice, this list of conditions and the following disclaimer in the
# *       documentation and/or other materials provided with the distribution.
# *     * Neither the name of the NVIDIA CORPORATION nor the
# *       names of its contributors may be used to endorse or promote products
# *       derived from this software without specific prior written permission.
# *
# * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# * DISCLAIMED. IN NO EVENT SHALL NVIDIA CORPORATION BE LIABLE FOR ANY
# * DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# * (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# * LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# * SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *
#******************************************************************************/


#-------------------------------------------------------------------------------
#
# Makefile usage
#
# make <target> [sm=<XXX,...>] [cdp=<0|1>] [force32=<0|1>] [abi=<0|1>] [open64=<0|1>] [verbose=<0|1>] [keep=<0|1>] [quicktest=<0|1>] [quickertest=<0|1>]
#
#-------------------------------------------------------------------------------

include ../common.mk

#-------------------------------------------------------------------------------
# Commandline Options
#-------------------------------------------------------------------------------

# Testing mode option (quick/thorough)
ifeq ($(quickertest), 1)
	NVCCFLAGS += -DQUICKER_TEST
	TEST_SUFFIX = quicker
else ifeq ($(quicktest), 1)
	NVCCFLAGS += -DQUICK_TEST
	TEST_SUFFIX = quick
else
	TEST_SUFFIX = thorough
	NPPI =
endif


# CUDA memcheck (enabled by default)
ifeq ($(memcheck), 0)
	MEMCHECK =
else
	MEMCHECK = cuda-memcheck
endif


#-------------------------------------------------------------------------------
# Compiler and compilation platform
#-------------------------------------------------------------------------------

# Includes
INC += -I$(CUB_DIR) -I$(CUB_DIR)test

# Suffix to append to each binary
SUFFIX = $(BIN_SUFFIX)_$(TEST_SUFFIX)

# Define test arch
DEFINES += -DTEST_ARCH=$(TEST_ARCH)


#-------------------------------------------------------------------------------
# Dependency Lists
#-------------------------------------------------------------------------------

rwildcard=$(foreach d,$(wildcard $1*),$(call rwildcard,$d/,$2) $(filter $(subst *,%,$2),$d))

DEPS = $(CUB_DEPS) \
	$(CUB_DIR)test/Makefile \
	$(CUB_DIR)test/test_util.h \
	$(CUB_DIR)test/mersenne.h \

BLOCK_REDUCE = test_block_reduce_raking \
	test_block_reduce_warp_reductions


BLOCK_SCAN = test_block_scan_raking \
	test_block_scan_raking_memoize \
	test_block_scan_warp_scans


BLOCK_RADIX_SORT = test_block_radix_sort_keys \
	test_block_radix_sort_pairs

DEVICE_RADIX_SORT = test_device_radix_sort \
	test_device_radix_sort_segmented

ALL = link \
	test_iterator \
	test_allocator \
	test_warp_scan \
	test_warp_reduce \
	$(BLOCK_REDUCE) \
	$(BLOCK_SCAN) \
	$(BLOCK_RADIX_SORT) \
	test_block_load_store \
	test_block_histogram \
	test_device_reduce \
	test_device_histogram \
	test_device_scan \
	$(DEVICE_RADIX_SORT) \
	test_device_reduce_by_key\
	test_device_run_length_encode\
	test_device_select_unique \
	test_device_select_if

#	test_grid_barrier \ fails on sm110
#	test_device_seg_reduce



#-------------------------------------------------------------------------------
# make default
#-------------------------------------------------------------------------------

default:


#-------------------------------------------------------------------------------
# make clean
#-------------------------------------------------------------------------------

clean :
	rm -f bin/*$(CPU_ARCH_SUFFIX)*
	rm -f *.i* *.cubin *.cu.c *.cudafe* *.fatbin.c *.ptx *.hash *.cu.cpp *.o


#-------------------------------------------------------------------------------
# make all
#-------------------------------------------------------------------------------

all : $(ALL)


#-------------------------------------------------------------------------------
# make run
#-------------------------------------------------------------------------------

run :
	for i in $(ALL); do $(MEMCHECK) ./bin/$${i}_$(SUFFIX) --device=$(device) || exit 1; done

run_block_reduce :
	for i in $(BLOCK_REDUCE); do $(MEMCHECK) ./bin/$${i}_$(SUFFIX) --device=$(device) || exit 1; done

run_block_scan :
	for i in $(BLOCK_SCAN); do $(MEMCHECK) ./bin/$${i}_$(SUFFIX) --device=$(device) || exit 1; done

run_block_radix_sort :
	for i in $(BLOCK_RADIX_SORT); do $(MEMCHECK) ./bin/$${i}_$(SUFFIX) --device=$(device) || exit 1; done

run_device_radix_sort :
	for i in $(DEVICE_RADIX_SORT); do $(MEMCHECK) ./bin/$${i}_$(SUFFIX) --device=$(device) || exit 1; done


#-------------------------------------------------------------------------------
# make link
#-------------------------------------------------------------------------------

link : bin/link_$(SUFFIX)

bin/link_$(SUFFIX) : link_a.cu link_b.cu link_main.cpp $(DEPS)
	mkdir -p bin
	$(NVCC) $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(DEFINES) $(SM_TARGETS) link_a.cu -c -o bin/link_a.obj
	$(NVCC) $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(DEFINES) $(SM_TARGETS) link_b.cu -c -o bin/link_b.obj
	$(NVCC) $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(DEFINES) $(SM_TARGETS) link_main.cpp bin/link_a.obj bin/link_b.obj -o bin/link_$(SUFFIX)


#-------------------------------------------------------------------------------
# make test_iterator
#-------------------------------------------------------------------------------

test_iterator: bin/test_iterator_$(SUFFIX)

bin/test_iterator_$(SUFFIX) : test_iterator.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_iterator_$(SUFFIX) test_iterator.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_allocator
#-------------------------------------------------------------------------------

test_allocator: bin/test_allocator_$(SUFFIX)

bin/test_allocator_$(SUFFIX) : test_allocator.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_allocator_$(SUFFIX) test_allocator.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_grid_barrier
#-------------------------------------------------------------------------------

test_grid_barrier: bin/test_grid_barrier_$(SUFFIX)

bin/test_grid_barrier_$(SUFFIX) : test_grid_barrier.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_grid_barrier_$(SUFFIX) test_grid_barrier.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_warp_scan
#-------------------------------------------------------------------------------

test_warp_scan: bin/test_warp_scan_$(SUFFIX)

bin/test_warp_scan_$(SUFFIX) : test_warp_scan.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_warp_scan_$(SUFFIX) test_warp_scan.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_warp_reduce
#-------------------------------------------------------------------------------

test_warp_reduce: bin/test_warp_reduce_$(SUFFIX)

bin/test_warp_reduce_$(SUFFIX) : test_warp_reduce.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_warp_reduce_$(SUFFIX) test_warp_reduce.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_reduce_raking
#-------------------------------------------------------------------------------

test_block_reduce_raking: bin/test_block_reduce_raking_$(SUFFIX)

bin/test_block_reduce_raking_$(SUFFIX) : test_block_reduce.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_RAKING $(SM_TARGETS) -o bin/test_block_reduce_raking_$(SUFFIX) test_block_reduce.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_reduce_warp_reductions
#-------------------------------------------------------------------------------

test_block_reduce_warp_reductions: bin/test_block_reduce_warp_reductions_$(SUFFIX)

bin/test_block_reduce_warp_reductions_$(SUFFIX) : test_block_reduce.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_WARP_REDUCTIONS $(SM_TARGETS) -o bin/test_block_reduce_warp_reductions_$(SUFFIX) test_block_reduce.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_reduce
#-------------------------------------------------------------------------------

test_block_reduce: $(BLOCK_REDUCE)


#-------------------------------------------------------------------------------
# make test_block_scan_raking
#-------------------------------------------------------------------------------

test_block_scan_raking: bin/test_block_scan_raking_$(SUFFIX)

bin/test_block_scan_raking_$(SUFFIX) : test_block_scan.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_RAKING $(SM_TARGETS) -o bin/test_block_scan_raking_$(SUFFIX) test_block_scan.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_scan_raking_memoize
#-------------------------------------------------------------------------------

test_block_scan_raking_memoize: bin/test_block_scan_raking_memoize_$(SUFFIX)

bin/test_block_scan_raking_memoize_$(SUFFIX) : test_block_scan.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_RAKING_MEMOIZE $(SM_TARGETS) -o bin/test_block_scan_raking_memoize_$(SUFFIX) test_block_scan.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_scan_warp_scans
#-------------------------------------------------------------------------------

test_block_scan_warp_scans: bin/test_block_scan_warp_scans_$(SUFFIX)

bin/test_block_scan_warp_scans_$(SUFFIX) : test_block_scan.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_WARP_SCANS $(SM_TARGETS) -o bin/test_block_scan_warp_scans_$(SUFFIX) test_block_scan.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_scan
#-------------------------------------------------------------------------------

test_block_scan: $(BLOCK_SCAN)


#-------------------------------------------------------------------------------
# make test_block_load_store
#-------------------------------------------------------------------------------

test_block_load_store: bin/test_block_load_store_$(SUFFIX)

bin/test_block_load_store_$(SUFFIX) : test_block_load_store.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_block_load_store_$(SUFFIX) test_block_load_store.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_radix_sort_keys
#-------------------------------------------------------------------------------

test_block_radix_sort_keys: bin/test_block_radix_sort_keys_$(SUFFIX)

bin/test_block_radix_sort_keys_$(SUFFIX) : test_block_radix_sort.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DTEST_KEYS_ONLY $(SM_TARGETS) -o bin/test_block_radix_sort_keys_$(SUFFIX) test_block_radix_sort.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3

#-------------------------------------------------------------------------------
# make test_block_radix_sort_pairs
#-------------------------------------------------------------------------------

test_block_radix_sort_pairs: bin/test_block_radix_sort_pairs_$(SUFFIX)

bin/test_block_radix_sort_pairs_$(SUFFIX) : test_block_radix_sort.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_block_radix_sort_pairs_$(SUFFIX) test_block_radix_sort.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_block_radix_sort
#-------------------------------------------------------------------------------

test_block_radix_sort : $(BLOCK_RADIX_SORT)


#-------------------------------------------------------------------------------
# make test_block_histogram
#-------------------------------------------------------------------------------

test_block_histogram: bin/test_block_histogram_$(SUFFIX)

bin/test_block_histogram_$(SUFFIX) : test_block_histogram.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_block_histogram_$(SUFFIX) test_block_histogram.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_reduce
#-------------------------------------------------------------------------------

test_device_reduce: bin/test_device_reduce_$(SUFFIX)

bin/test_device_reduce_$(SUFFIX) : test_device_reduce.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_reduce_$(SUFFIX) test_device_reduce.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_histogram
#-------------------------------------------------------------------------------

test_device_histogram: bin/test_device_histogram_$(SUFFIX)

bin/test_device_histogram_$(SUFFIX) : test_device_histogram.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_histogram_$(SUFFIX) test_device_histogram.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) $(NPPI) -O3


#-------------------------------------------------------------------------------
# make test_device_scan
#-------------------------------------------------------------------------------

test_device_scan: bin/test_device_scan_$(SUFFIX)

bin/test_device_scan_$(SUFFIX) : test_device_scan.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_scan_$(SUFFIX) test_device_scan.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_radix_sort
#-------------------------------------------------------------------------------

test_device_radix_sort: bin/test_device_radix_sort_$(SUFFIX)

bin/test_device_radix_sort_$(SUFFIX) : test_device_radix_sort.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_radix_sort_$(SUFFIX) test_device_radix_sort.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_radix_sort_segmented
#-------------------------------------------------------------------------------

test_device_radix_sort_segmented: bin/test_device_radix_sort_segmented_$(SUFFIX)

bin/test_device_radix_sort_segmented_$(SUFFIX) : test_device_radix_sort.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) -DSEGMENTED_SORT $(SM_TARGETS) -o bin/test_device_radix_sort_segmented_$(SUFFIX) test_device_radix_sort.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_select_unique
#-------------------------------------------------------------------------------

test_device_select_unique: bin/test_device_select_unique_$(SUFFIX)

bin/test_device_select_unique_$(SUFFIX) : test_device_select_unique.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_select_unique_$(SUFFIX) test_device_select_unique.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3


#-------------------------------------------------------------------------------
# make test_device_select_if
#-------------------------------------------------------------------------------

test_device_select_if: bin/test_device_select_if_$(SUFFIX)

bin/test_device_select_if_$(SUFFIX) : test_device_select_if.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_select_if_$(SUFFIX) test_device_select_if.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3

#-------------------------------------------------------------------------------
# make test_device_reduce_by_key
#-------------------------------------------------------------------------------

test_device_reduce_by_key: bin/test_device_reduce_by_key_$(SUFFIX)

bin/test_device_reduce_by_key_$(SUFFIX) : test_device_reduce_by_key.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_reduce_by_key_$(SUFFIX) test_device_reduce_by_key.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3

#-------------------------------------------------------------------------------
# make test_device_run_length_encode
#-------------------------------------------------------------------------------

test_device_run_length_encode: bin/test_device_run_length_encode_$(SUFFIX)

bin/test_device_run_length_encode_$(SUFFIX) : test_device_run_length_encode.cu $(DEPS)
	mkdir -p bin
	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_run_length_encode_$(SUFFIX) test_device_run_length_encode.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3




#-------------------------------------------------------------------------------
# make test_device_seg_reduce
#-------------------------------------------------------------------------------
#
#test_device_seg_reduce: bin/test_device_seg_reduce_$(SUFFIX)
#
#bin/test_device_seg_reduce_$(SUFFIX) : test_device_seg_reduce.cu $(DEPS)
#	mkdir -p bin
#	$(NVCC) $(DEFINES) $(SM_TARGETS) -o bin/test_device_seg_reduce_$(SUFFIX) test_device_seg_reduce.cu $(NVCCFLAGS) $(CPU_ARCH) $(INC) $(LIBS) -O3
spaces/CVPR/LIVE/thrust/thrust/detail/cpp14_required.h
DELETED
@@ -1,26 +0,0 @@
/*
 *  Copyright 2018 NVIDIA Corporation
 *
 *  Licensed under the Apache License, Version 2.0 (the "License");
 *  you may not use this file except in compliance with the License.
 *  You may obtain a copy of the License at
 *
 *      http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

#pragma once

#include <thrust/detail/config/cpp_dialect.h>

#ifndef THRUST_CPP14_REQUIRED_NO_ERROR
#  if THRUST_CPP_DIALECT < 2014
#    error C++14 is required for this Thrust feature; please upgrade your compiler or pass the appropriate -std=c++14 flag to it.
#  endif
#endif
spaces/CVPR/drawings-to-human/static/_app/immutable/assets/pages/__layout.svelte-cc9dd261.css
DELETED
@@ -1 +0,0 @@
@import"https://fonts.googleapis.com/css2?family=Open+Sans:wght@100;200;300;400;500;600;700;800&display=swap";*,:before,:after{box-sizing:border-box;border-width:0;border-style:solid;border-color:#e5e7eb}:before,:after{--tw-content: ""}html{line-height:1.5;-webkit-text-size-adjust:100%;-moz-tab-size:4;-o-tab-size:4;tab-size:4;font-family:ui-sans-serif,system-ui,-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Helvetica Neue,Arial,Noto Sans,sans-serif,"Apple Color Emoji","Segoe UI Emoji",Segoe UI Symbol,"Noto Color Emoji"}body{margin:0;line-height:inherit}hr{height:0;color:inherit;border-top-width:1px}abbr:where([title]){-webkit-text-decoration:underline dotted;text-decoration:underline dotted}h1,h2,h3,h4,h5,h6{font-size:inherit;font-weight:inherit}a{color:inherit;text-decoration:inherit}b,strong{font-weight:bolder}code,kbd,samp,pre{font-family:ui-monospace,SFMono-Regular,Menlo,Monaco,Consolas,Liberation Mono,Courier New,monospace;font-size:1em}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sub{bottom:-.25em}sup{top:-.5em}table{text-indent:0;border-color:inherit;border-collapse:collapse}button,input,optgroup,select,textarea{font-family:inherit;font-size:100%;font-weight:inherit;line-height:inherit;color:inherit;margin:0;padding:0}button,select{text-transform:none}button,[type=button],[type=reset],[type=submit]{-webkit-appearance:button;background-color:transparent;background-image:none}:-moz-focusring{outline:auto}:-moz-ui-invalid{box-shadow:none}progress{vertical-align:baseline}::-webkit-inner-spin-button,::-webkit-outer-spin-button{height:auto}[type=search]{-webkit-appearance:textfield;outline-offset:-2px}::-webkit-search-decoration{-webkit-appearance:none}::-webkit-file-upload-button{-webkit-appearance:button;font:inherit}summary{display:list-item}blockquote,dl,dd,h1,h2,h3,h4,h5,h6,hr,figure,p,pre{margin:0}fieldset{margin:0;padding:0}legend{padding:0}ol,ul,menu{list-style:none;margin:0;padding:0}textarea{resize:vertical}input::-moz-placeholder,textarea::-moz-placeholder{opacity:1;color:#9ca3af}input::placeholder,textarea::placeholder{opacity:1;color:#9ca3af}button,[role=button]{cursor:pointer}:disabled{cursor:default}img,svg,video,canvas,audio,iframe,embed,object{display:block;vertical-align:middle}img,video{max-width:100%;height:auto}html{font-family:Open Sans,sans-serif}*,:before,:after{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: }::-webkit-backdrop{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: 
;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: }::backdrop{--tw-border-spacing-x: 0;--tw-border-spacing-y: 0;--tw-translate-x: 0;--tw-translate-y: 0;--tw-rotate: 0;--tw-skew-x: 0;--tw-skew-y: 0;--tw-scale-x: 1;--tw-scale-y: 1;--tw-pan-x: ;--tw-pan-y: ;--tw-pinch-zoom: ;--tw-scroll-snap-strictness: proximity;--tw-ordinal: ;--tw-slashed-zero: ;--tw-numeric-figure: ;--tw-numeric-spacing: ;--tw-numeric-fraction: ;--tw-ring-inset: ;--tw-ring-offset-width: 0px;--tw-ring-offset-color: #fff;--tw-ring-color: rgb(59 130 246 / .5);--tw-ring-offset-shadow: 0 0 #0000;--tw-ring-shadow: 0 0 #0000;--tw-shadow: 0 0 #0000;--tw-shadow-colored: 0 0 #0000;--tw-blur: ;--tw-brightness: ;--tw-contrast: ;--tw-grayscale: ;--tw-hue-rotate: ;--tw-invert: ;--tw-saturate: ;--tw-sepia: ;--tw-drop-shadow: ;--tw-backdrop-blur: ;--tw-backdrop-brightness: ;--tw-backdrop-contrast: ;--tw-backdrop-grayscale: ;--tw-backdrop-hue-rotate: ;--tw-backdrop-invert: ;--tw-backdrop-opacity: ;--tw-backdrop-saturate: ;--tw-backdrop-sepia: }.prose{color:var(--tw-prose-body);max-width:65ch}.prose :where([class~="lead"]):not(:where([class~="not-prose"] *)){color:var(--tw-prose-lead);font-size:1.25em;line-height:1.6;margin-top:1.2em;margin-bottom:1.2em}.prose :where(a):not(:where([class~="not-prose"] *)){color:var(--tw-prose-links);text-decoration:underline;font-weight:500}.prose :where(strong):not(:where([class~="not-prose"] *)){color:var(--tw-prose-bold);font-weight:600}.prose :where(ol):not(:where([class~="not-prose"] *)){list-style-type:decimal;padding-left:1.625em}.prose :where(ol[type="A"]):not(:where([class~="not-prose"] *)){list-style-type:upper-alpha}.prose :where(ol[type="a"]):not(:where([class~="not-prose"] *)){list-style-type:lower-alpha}.prose :where(ol[type="A" s]):not(:where([class~="not-prose"] *)){list-style-type:upper-alpha}.prose :where(ol[type="a" s]):not(:where([class~="not-prose"] *)){list-style-type:lower-alpha}.prose :where(ol[type="I"]):not(:where([class~="not-prose"] *)){list-style-type:upper-roman}.prose :where(ol[type="i"]):not(:where([class~="not-prose"] *)){list-style-type:lower-roman}.prose :where(ol[type="I" s]):not(:where([class~="not-prose"] *)){list-style-type:upper-roman}.prose :where(ol[type="i" s]):not(:where([class~="not-prose"] *)){list-style-type:lower-roman}.prose :where(ol[type="1"]):not(:where([class~="not-prose"] *)){list-style-type:decimal}.prose :where(ul):not(:where([class~="not-prose"] *)){list-style-type:disc;padding-left:1.625em}.prose :where(ol > li):not(:where([class~="not-prose"] *))::marker{font-weight:400;color:var(--tw-prose-counters)}.prose :where(ul > li):not(:where([class~="not-prose"] *))::marker{color:var(--tw-prose-bullets)}.prose :where(hr):not(:where([class~="not-prose"] *)){border-color:var(--tw-prose-hr);border-top-width:1px;margin-top:3em;margin-bottom:3em}.prose 
:where(blockquote):not(:where([class~="not-prose"] *)){font-weight:500;font-style:italic;color:var(--tw-prose-quotes);border-left-width:.25rem;border-left-color:var(--tw-prose-quote-borders);quotes:"\201c""\201d""\2018""\2019";margin-top:1.6em;margin-bottom:1.6em;padding-left:1em}.prose :where(h1):not(:where([class~="not-prose"] *)){color:var(--tw-prose-headings);font-weight:800;font-size:2.25em;margin-top:0;margin-bottom:.8888889em;line-height:1.1111111}.prose :where(h1 strong):not(:where([class~="not-prose"] *)){font-weight:900}.prose :where(h2):not(:where([class~="not-prose"] *)){color:var(--tw-prose-headings);font-weight:700;font-size:1.5em;margin-top:2em;margin-bottom:1em;line-height:1.3333333}.prose :where(h2 strong):not(:where([class~="not-prose"] *)){font-weight:800}.prose :where(h3):not(:where([class~="not-prose"] *)){color:var(--tw-prose-headings);font-weight:600;font-size:1.25em;margin-top:1.6em;margin-bottom:.6em;line-height:1.6}.prose :where(h3 strong):not(:where([class~="not-prose"] *)){font-weight:700}.prose :where(h4):not(:where([class~="not-prose"] *)){color:var(--tw-prose-headings);font-weight:600;margin-top:1.5em;margin-bottom:.5em;line-height:1.5}.prose :where(h4 strong):not(:where([class~="not-prose"] *)){font-weight:700}.prose :where(figure > *):not(:where([class~="not-prose"] *)){margin-top:0;margin-bottom:0}.prose :where(figcaption):not(:where([class~="not-prose"] *)){color:var(--tw-prose-captions);font-size:.875em;line-height:1.4285714;margin-top:.8571429em}.prose :where(a code):not(:where([class~="not-prose"] *)){color:var(--tw-prose-links)}.prose :where(pre code):not(:where([class~="not-prose"] *)):before{content:none}.prose :where(pre code):not(:where([class~="not-prose"] *)):after{content:none}.prose :where(table):not(:where([class~="not-prose"] *)){width:100%;table-layout:auto;text-align:left;margin-top:2em;margin-bottom:2em;font-size:.875em;line-height:1.7142857}.prose :where(thead):not(:where([class~="not-prose"] *)){border-bottom-width:1px;border-bottom-color:var(--tw-prose-th-borders)}.prose :where(thead th):not(:where([class~="not-prose"] *)){color:var(--tw-prose-headings);font-weight:600;vertical-align:bottom;padding-right:.5714286em;padding-bottom:.5714286em;padding-left:.5714286em}.prose :where(tbody tr):not(:where([class~="not-prose"] *)){border-bottom-width:1px;border-bottom-color:var(--tw-prose-td-borders)}.prose :where(tbody tr:last-child):not(:where([class~="not-prose"] *)){border-bottom-width:0}.prose :where(tbody td):not(:where([class~="not-prose"] *)){vertical-align:baseline;padding:.5714286em}.prose{--tw-prose-body: #374151;--tw-prose-headings: #111827;--tw-prose-lead: #4b5563;--tw-prose-links: #111827;--tw-prose-bold: #111827;--tw-prose-counters: #6b7280;--tw-prose-bullets: #d1d5db;--tw-prose-hr: #e5e7eb;--tw-prose-quotes: #111827;--tw-prose-quote-borders: #e5e7eb;--tw-prose-captions: #6b7280;--tw-prose-code: #111827;--tw-prose-pre-code: #e5e7eb;--tw-prose-pre-bg: #1f2937;--tw-prose-th-borders: #d1d5db;--tw-prose-td-borders: #e5e7eb;--tw-prose-invert-body: #d1d5db;--tw-prose-invert-headings: #fff;--tw-prose-invert-lead: #9ca3af;--tw-prose-invert-links: #fff;--tw-prose-invert-bold: #fff;--tw-prose-invert-counters: #9ca3af;--tw-prose-invert-bullets: #4b5563;--tw-prose-invert-hr: #374151;--tw-prose-invert-quotes: #f3f4f6;--tw-prose-invert-quote-borders: #374151;--tw-prose-invert-captions: #9ca3af;--tw-prose-invert-code: #fff;--tw-prose-invert-pre-code: #d1d5db;--tw-prose-invert-pre-bg: rgb(0 0 0 / 50%);--tw-prose-invert-th-borders: 
#4b5563;--tw-prose-invert-td-borders: #374151;font-size:1rem;line-height:1.75}.prose :where(p):not(:where([class~="not-prose"] *)){margin-top:1.25em;margin-bottom:1.25em}.prose :where(img):not(:where([class~="not-prose"] *)){margin-top:2em;margin-bottom:2em}.prose :where(video):not(:where([class~="not-prose"] *)){margin-top:2em;margin-bottom:2em}.prose :where(figure):not(:where([class~="not-prose"] *)){margin-top:2em;margin-bottom:2em}.prose :where(h2 code):not(:where([class~="not-prose"] *)){font-size:.875em}.prose :where(h3 code):not(:where([class~="not-prose"] *)){font-size:.9em}.prose :where(li):not(:where([class~="not-prose"] *)){margin-top:.5em;margin-bottom:.5em}.prose :where(ol > li):not(:where([class~="not-prose"] *)){padding-left:.375em}.prose :where(ul > li):not(:where([class~="not-prose"] *)){padding-left:.375em}.prose>:where(ul > li p):not(:where([class~="not-prose"] *)){margin-top:.75em;margin-bottom:.75em}.prose>:where(ul > li > *:first-child):not(:where([class~="not-prose"] *)){margin-top:1.25em}.prose>:where(ul > li > *:last-child):not(:where([class~="not-prose"] *)){margin-bottom:1.25em}.prose>:where(ol > li > *:first-child):not(:where([class~="not-prose"] *)){margin-top:1.25em}.prose>:where(ol > li > *:last-child):not(:where([class~="not-prose"] *)){margin-bottom:1.25em}.prose :where(ul ul,ul ol,ol ul,ol ol):not(:where([class~="not-prose"] *)){margin-top:.75em;margin-bottom:.75em}.prose :where(hr + *):not(:where([class~="not-prose"] *)){margin-top:0}.prose :where(h2 + *):not(:where([class~="not-prose"] *)){margin-top:0}.prose :where(h3 + *):not(:where([class~="not-prose"] *)){margin-top:0}.prose :where(h4 + *):not(:where([class~="not-prose"] *)){margin-top:0}.prose :where(thead th:first-child):not(:where([class~="not-prose"] *)){padding-left:0}.prose :where(thead th:last-child):not(:where([class~="not-prose"] *)){padding-right:0}.prose :where(tbody td:first-child):not(:where([class~="not-prose"] *)){padding-left:0}.prose :where(tbody td:last-child):not(:where([class~="not-prose"] *)){padding-right:0}.prose>:where(:first-child):not(:where([class~="not-prose"] *)){margin-top:0}.prose>:where(:last-child):not(:where([class~="not-prose"] *)){margin-bottom:0}.pointer-events-none{pointer-events:none}.absolute{position:absolute}.relative{position:relative}.bottom-0{bottom:0px}.left-0{left:0px}.top-0{top:0px}.right-0{right:0px}.z-0{z-index:0}.z-10{z-index:10}.z-20{z-index:20}.my-3{margin-top:.75rem;margin-bottom:.75rem}.my-6{margin-top:1.5rem;margin-bottom:1.5rem}.mx-auto{margin-left:auto;margin-right:auto}.-mx-3{margin-left:-.75rem;margin-right:-.75rem}.mt-6{margin-top:1.5rem}.mb-2{margin-bottom:.5rem}.box-border{box-sizing:border-box}.block{display:block}.flex{display:flex}.grid{display:grid}.hidden{display:none}.aspect-\[256\/512\]{aspect-ratio:256/512}.h-0{height:0px}.h-full{height:100%}.max-h-\[9rem\]{max-height:9rem}.max-h-24{max-height:6rem}.w-0{width:0px}.w-full{width:100%}.max-w-full{max-width:100%}.max-w-\[3rem\]{max-width:3rem}.max-w-screen-md{max-width:768px}.-translate-x-1\/2{--tw-translate-x: -50%;transform:translate(var(--tw-translate-x),var(--tw-translate-y)) rotate(var(--tw-rotate)) skew(var(--tw-skew-x)) skewY(var(--tw-skew-y)) scaleX(var(--tw-scale-x)) scaleY(var(--tw-scale-y))}@-webkit-keyframes spin{to{transform:rotate(360deg)}}@keyframes spin{to{transform:rotate(360deg)}}.animate-spin{-webkit-animation:spin 1s linear infinite;animation:spin 1s linear infinite}.cursor-pointer{cursor:pointer}.snap-x{scroll-snap-type:x 
var(--tw-scroll-snap-strictness)}.snap-y{scroll-snap-type:y var(--tw-scroll-snap-strictness)}.snap-mandatory{--tw-scroll-snap-strictness: mandatory}.snap-start{scroll-snap-align:start}.snap-always{scroll-snap-stop:always}.grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.grid-cols-\[2fr_1\.5fr\]{grid-template-columns:2fr 1.5fr}.flex-col{flex-direction:column}.flex-nowrap{flex-wrap:nowrap}.items-center{align-items:center}.justify-center{justify-content:center}.gap-2{gap:.5rem}.gap-1{gap:.25rem}.overflow-hidden{overflow:hidden}.overflow-clip{overflow:clip}.overflow-scroll{overflow:scroll}.overflow-x-scroll{overflow-x:scroll}.whitespace-nowrap{white-space:nowrap}.whitespace-pre{white-space:pre}.rounded-lg{border-radius:.5rem}.border{border-width:1px}.border-gray-500{--tw-border-opacity: 1;border-color:rgb(107 114 128 / var(--tw-border-opacity))}.border-gray-300{--tw-border-opacity: 1;border-color:rgb(209 213 219 / var(--tw-border-opacity))}.bg-white{--tw-bg-opacity: 1;background-color:rgb(255 255 255 / var(--tw-bg-opacity))}.bg-gray-50{--tw-bg-opacity: 1;background-color:rgb(249 250 251 / var(--tw-bg-opacity))}.p-3{padding:.75rem}.p-1{padding:.25rem}.px-2{padding-left:.5rem;padding-right:.5rem}.px-3{padding-left:.75rem;padding-right:.75rem}.py-5{padding-top:1.25rem;padding-bottom:1.25rem}.py-3{padding-top:.75rem;padding-bottom:.75rem}.pl-2{padding-left:.5rem}.text-base{font-size:1rem;line-height:1.5rem}.text-sm{font-size:.875rem;line-height:1.25rem}.text-xs{font-size:.75rem;line-height:1rem}.font-bold{font-weight:700}.leading-6{line-height:1.5rem}.text-black{--tw-text-opacity: 1;color:rgb(0 0 0 / var(--tw-text-opacity))}.text-white{--tw-text-opacity: 1;color:rgb(255 255 255 / var(--tw-text-opacity))}.text-gray-900{--tw-text-opacity: 1;color:rgb(17 24 39 / var(--tw-text-opacity))}.opacity-0{opacity:0}.opacity-30{opacity:.3}.outline{outline-style:solid}.outline-2{outline-width:2px}.outline-offset-\[-2px\]{outline-offset:-2px}.ring{--tw-ring-offset-shadow: var(--tw-ring-inset) 0 0 0 var(--tw-ring-offset-width) var(--tw-ring-offset-color);--tw-ring-shadow: var(--tw-ring-inset) 0 0 0 calc(3px + var(--tw-ring-offset-width)) var(--tw-ring-color);box-shadow:var(--tw-ring-offset-shadow),var(--tw-ring-shadow),var(--tw-shadow, 0 0 #0000)}.transition-all{transition-property:all;transition-timing-function:cubic-bezier(.4,0,.2,1);transition-duration:.15s}.duration-200{transition-duration:.2s}.ease-in-out{transition-timing-function:cubic-bezier(.4,0,.2,1)}.hover\:outline:hover{outline-style:solid}.focus\:border-blue-500:focus{--tw-border-opacity: 1;border-color:rgb(59 130 246 / var(--tw-border-opacity))}.focus\:ring-blue-500:focus{--tw-ring-opacity: 1;--tw-ring-color: rgb(59 130 246 / var(--tw-ring-opacity))}.disabled\:opacity-50:disabled{opacity:.5}@media (prefers-color-scheme: dark){.dark\:prose-invert{--tw-prose-body: var(--tw-prose-invert-body);--tw-prose-headings: var(--tw-prose-invert-headings);--tw-prose-lead: var(--tw-prose-invert-lead);--tw-prose-links: var(--tw-prose-invert-links);--tw-prose-bold: var(--tw-prose-invert-bold);--tw-prose-counters: var(--tw-prose-invert-counters);--tw-prose-bullets: var(--tw-prose-invert-bullets);--tw-prose-hr: var(--tw-prose-invert-hr);--tw-prose-quotes: var(--tw-prose-invert-quotes);--tw-prose-quote-borders: var(--tw-prose-invert-quote-borders);--tw-prose-captions: var(--tw-prose-invert-captions);--tw-prose-code: var(--tw-prose-invert-code);--tw-prose-pre-code: var(--tw-prose-invert-pre-code);--tw-prose-pre-bg: 
var(--tw-prose-invert-pre-bg);--tw-prose-th-borders: var(--tw-prose-invert-th-borders);--tw-prose-td-borders: var(--tw-prose-invert-td-borders)}.dark\:border-gray-300{--tw-border-opacity: 1;border-color:rgb(209 213 219 / var(--tw-border-opacity))}.dark\:border-gray-600{--tw-border-opacity: 1;border-color:rgb(75 85 99 / var(--tw-border-opacity))}.dark\:bg-\[rgb\(11\,15\,25\)\]{--tw-bg-opacity: 1;background-color:rgb(11 15 25 / var(--tw-bg-opacity))}.dark\:bg-gray-700{--tw-bg-opacity: 1;background-color:rgb(55 65 81 / var(--tw-bg-opacity))}.dark\:text-white{--tw-text-opacity: 1;color:rgb(255 255 255 / var(--tw-text-opacity))}.dark\:placeholder-gray-400::-moz-placeholder{--tw-placeholder-opacity: 1;color:rgb(156 163 175 / var(--tw-placeholder-opacity))}.dark\:placeholder-gray-400::placeholder{--tw-placeholder-opacity: 1;color:rgb(156 163 175 / var(--tw-placeholder-opacity))}.dark\:focus\:ring-blue-500:focus{--tw-ring-opacity: 1;--tw-ring-color: rgb(59 130 246 / var(--tw-ring-opacity))}}@media (min-width: 530px){.sm\:max-h-\[none\]{max-height:none}.sm\:grid-cols-3{grid-template-columns:repeat(3,minmax(0,1fr))}.sm\:grid-cols-2{grid-template-columns:repeat(2,minmax(0,1fr))}.sm\:flex-row{flex-direction:row}}
spaces/CVPR/lama-example/saicinpainting/training/modules/pix2pixhd.py
DELETED
@@ -1,669 +0,0 @@
|
|
1 |
-
# original: https://github.com/NVIDIA/pix2pixHD/blob/master/models/networks.py
|
2 |
-
import collections
|
3 |
-
from functools import partial
|
4 |
-
import functools
|
5 |
-
import logging
|
6 |
-
from collections import defaultdict
|
7 |
-
|
8 |
-
import numpy as np
|
9 |
-
import torch.nn as nn
|
10 |
-
|
11 |
-
from saicinpainting.training.modules.base import BaseDiscriminator, deconv_factory, get_conv_block_ctor, get_norm_layer, get_activation
|
12 |
-
from saicinpainting.training.modules.ffc import FFCResnetBlock
|
13 |
-
from saicinpainting.training.modules.multidilated_conv import MultidilatedConv
|
14 |
-
|
15 |
-
class DotDict(defaultdict):
|
16 |
-
# https://stackoverflow.com/questions/2352181/how-to-use-a-dot-to-access-members-of-dictionary
|
17 |
-
"""dot.notation access to dictionary attributes"""
|
18 |
-
__getattr__ = defaultdict.get
|
19 |
-
__setattr__ = defaultdict.__setitem__
|
20 |
-
__delattr__ = defaultdict.__delitem__
|
21 |
-
|
22 |
-
class Identity(nn.Module):
|
23 |
-
def __init__(self):
|
24 |
-
super().__init__()
|
25 |
-
|
26 |
-
def forward(self, x):
|
27 |
-
return x
|
28 |
-
|
29 |
-
|
30 |
-
class ResnetBlock(nn.Module):
|
31 |
-
def __init__(self, dim, padding_type, norm_layer, activation=nn.ReLU(True), use_dropout=False, conv_kind='default',
|
32 |
-
dilation=1, in_dim=None, groups=1, second_dilation=None):
|
33 |
-
super(ResnetBlock, self).__init__()
|
34 |
-
self.in_dim = in_dim
|
35 |
-
self.dim = dim
|
36 |
-
if second_dilation is None:
|
37 |
-
second_dilation = dilation
|
38 |
-
self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, activation, use_dropout,
|
39 |
-
conv_kind=conv_kind, dilation=dilation, in_dim=in_dim, groups=groups,
|
40 |
-
second_dilation=second_dilation)
|
41 |
-
|
42 |
-
if self.in_dim is not None:
|
43 |
-
self.input_conv = nn.Conv2d(in_dim, dim, 1)
|
44 |
-
|
45 |
-
self.out_channnels = dim
|
46 |
-
|
47 |
-
def build_conv_block(self, dim, padding_type, norm_layer, activation, use_dropout, conv_kind='default',
|
48 |
-
dilation=1, in_dim=None, groups=1, second_dilation=1):
|
49 |
-
conv_layer = get_conv_block_ctor(conv_kind)
|
50 |
-
|
51 |
-
conv_block = []
|
52 |
-
p = 0
|
53 |
-
if padding_type == 'reflect':
|
54 |
-
conv_block += [nn.ReflectionPad2d(dilation)]
|
55 |
-
elif padding_type == 'replicate':
|
56 |
-
conv_block += [nn.ReplicationPad2d(dilation)]
|
57 |
-
elif padding_type == 'zero':
|
58 |
-
p = dilation
|
59 |
-
else:
|
60 |
-
raise NotImplementedError('padding [%s] is not implemented' % padding_type)
|
61 |
-
|
62 |
-
if in_dim is None:
|
63 |
-
in_dim = dim
|
64 |
-
|
65 |
-
conv_block += [conv_layer(in_dim, dim, kernel_size=3, padding=p, dilation=dilation),
|
66 |
-
norm_layer(dim),
|
67 |
-
activation]
|
68 |
-
if use_dropout:
|
69 |
-
conv_block += [nn.Dropout(0.5)]
|
70 |
-
|
71 |
-
p = 0
|
72 |
-
if padding_type == 'reflect':
|
73 |
-
conv_block += [nn.ReflectionPad2d(second_dilation)]
|
74 |
-
elif padding_type == 'replicate':
|
75 |
-
conv_block += [nn.ReplicationPad2d(second_dilation)]
|
76 |
-
elif padding_type == 'zero':
|
77 |
-
p = second_dilation
|
78 |
-
else:
|
79 |
-
raise NotImplementedError('padding [%s] is not implemented' % padding_type)
|
80 |
-
conv_block += [conv_layer(dim, dim, kernel_size=3, padding=p, dilation=second_dilation, groups=groups),
|
81 |
-
norm_layer(dim)]
|
82 |
-
|
83 |
-
return nn.Sequential(*conv_block)
|
84 |
-
|
85 |
-
def forward(self, x):
|
86 |
-
x_before = x
|
87 |
-
if self.in_dim is not None:
|
88 |
-
x = self.input_conv(x)
|
89 |
-
out = x + self.conv_block(x_before)
|
90 |
-
return out
|
91 |
-
|
92 |
-
class ResnetBlock5x5(nn.Module):
|
93 |
-
def __init__(self, dim, padding_type, norm_layer, activation=nn.ReLU(True), use_dropout=False, conv_kind='default',
|
94 |
-
dilation=1, in_dim=None, groups=1, second_dilation=None):
|
95 |
-
super(ResnetBlock5x5, self).__init__()
|
96 |
-
self.in_dim = in_dim
|
97 |
-
self.dim = dim
|
98 |
-
if second_dilation is None:
|
99 |
-
second_dilation = dilation
|
100 |
-
self.conv_block = self.build_conv_block(dim, padding_type, norm_layer, activation, use_dropout,
|
101 |
-
conv_kind=conv_kind, dilation=dilation, in_dim=in_dim, groups=groups,
|
102 |
-
second_dilation=second_dilation)
|
103 |
-
|
104 |
-
if self.in_dim is not None:
|
105 |
-
self.input_conv = nn.Conv2d(in_dim, dim, 1)
|
106 |
-
|
107 |
-
self.out_channnels = dim
|
108 |
-
|
109 |
-
def build_conv_block(self, dim, padding_type, norm_layer, activation, use_dropout, conv_kind='default',
|
110 |
-
dilation=1, in_dim=None, groups=1, second_dilation=1):
|
111 |
-
conv_layer = get_conv_block_ctor(conv_kind)
|
112 |
-
|
113 |
-
conv_block = []
|
114 |
-
p = 0
|
115 |
-
if padding_type == 'reflect':
|
116 |
-
conv_block += [nn.ReflectionPad2d(dilation * 2)]
|
117 |
-
elif padding_type == 'replicate':
|
118 |
-
conv_block += [nn.ReplicationPad2d(dilation * 2)]
|
119 |
-
elif padding_type == 'zero':
|
120 |
-
p = dilation * 2
|
121 |
-
else:
|
122 |
-
raise NotImplementedError('padding [%s] is not implemented' % padding_type)
|
123 |
-
|
124 |
-
if in_dim is None:
|
125 |
-
in_dim = dim
|
126 |
-
|
127 |
-
conv_block += [conv_layer(in_dim, dim, kernel_size=5, padding=p, dilation=dilation),
|
128 |
-
norm_layer(dim),
|
129 |
-
activation]
|
130 |
-
if use_dropout:
|
131 |
-
conv_block += [nn.Dropout(0.5)]
|
132 |
-
|
133 |
-
p = 0
|
134 |
-
if padding_type == 'reflect':
|
135 |
-
conv_block += [nn.ReflectionPad2d(second_dilation * 2)]
|
136 |
-
elif padding_type == 'replicate':
|
137 |
-
conv_block += [nn.ReplicationPad2d(second_dilation * 2)]
|
138 |
-
elif padding_type == 'zero':
|
139 |
-
p = second_dilation * 2
|
140 |
-
else:
|
141 |
-
raise NotImplementedError('padding [%s] is not implemented' % padding_type)
|
142 |
-
conv_block += [conv_layer(dim, dim, kernel_size=5, padding=p, dilation=second_dilation, groups=groups),
|
143 |
-
norm_layer(dim)]
|
144 |
-
|
145 |
-
return nn.Sequential(*conv_block)
|
146 |
-
|
147 |
-
def forward(self, x):
|
148 |
-
x_before = x
|
149 |
-
if self.in_dim is not None:
|
150 |
-
x = self.input_conv(x)
|
151 |
-
out = x + self.conv_block(x_before)
|
152 |
-
return out
|
153 |
-
|
154 |
-
|
155 |
-
class MultidilatedResnetBlock(nn.Module):
|
156 |
-
def __init__(self, dim, padding_type, conv_layer, norm_layer, activation=nn.ReLU(True), use_dropout=False):
|
157 |
-
super().__init__()
|
158 |
-
self.conv_block = self.build_conv_block(dim, padding_type, conv_layer, norm_layer, activation, use_dropout)
|
159 |
-
|
160 |
-
def build_conv_block(self, dim, padding_type, conv_layer, norm_layer, activation, use_dropout, dilation=1):
|
161 |
-
conv_block = []
|
162 |
-
conv_block += [conv_layer(dim, dim, kernel_size=3, padding_mode=padding_type),
|
163 |
-
norm_layer(dim),
|
164 |
-
activation]
|
165 |
-
if use_dropout:
|
166 |
-
conv_block += [nn.Dropout(0.5)]
|
167 |
-
|
168 |
-
conv_block += [conv_layer(dim, dim, kernel_size=3, padding_mode=padding_type),
|
169 |
-
norm_layer(dim)]
|
170 |
-
|
171 |
-
return nn.Sequential(*conv_block)
|
172 |
-
|
173 |
-
def forward(self, x):
|
174 |
-
out = x + self.conv_block(x)
|
175 |
-
return out
|
176 |
-
|
177 |
-
|
178 |
-
class MultiDilatedGlobalGenerator(nn.Module):
|
179 |
-
def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3,
|
180 |
-
n_blocks=3, norm_layer=nn.BatchNorm2d,
|
181 |
-
padding_type='reflect', conv_kind='default',
|
182 |
-
deconv_kind='convtranspose', activation=nn.ReLU(True),
|
183 |
-
up_norm_layer=nn.BatchNorm2d, affine=None, up_activation=nn.ReLU(True),
|
184 |
-
add_out_act=True, max_features=1024, multidilation_kwargs={},
|
185 |
-
ffc_positions=None, ffc_kwargs={}):
|
186 |
-
assert (n_blocks >= 0)
|
187 |
-
super().__init__()
|
188 |
-
|
189 |
-
conv_layer = get_conv_block_ctor(conv_kind)
|
190 |
-
resnet_conv_layer = functools.partial(get_conv_block_ctor('multidilated'), **multidilation_kwargs)
|
191 |
-
norm_layer = get_norm_layer(norm_layer)
|
192 |
-
if affine is not None:
|
193 |
-
norm_layer = partial(norm_layer, affine=affine)
|
194 |
-
up_norm_layer = get_norm_layer(up_norm_layer)
|
195 |
-
if affine is not None:
|
196 |
-
up_norm_layer = partial(up_norm_layer, affine=affine)
|
197 |
-
|
198 |
-
model = [nn.ReflectionPad2d(3),
|
199 |
-
conv_layer(input_nc, ngf, kernel_size=7, padding=0),
|
200 |
-
norm_layer(ngf),
|
201 |
-
activation]
|
202 |
-
|
203 |
-
identity = Identity()
|
204 |
-
### downsample
|
205 |
-
for i in range(n_downsampling):
|
206 |
-
mult = 2 ** i
|
207 |
-
|
208 |
-
model += [conv_layer(min(max_features, ngf * mult),
|
209 |
-
min(max_features, ngf * mult * 2),
|
210 |
-
kernel_size=3, stride=2, padding=1),
|
211 |
-
norm_layer(min(max_features, ngf * mult * 2)),
|
212 |
-
activation]
|
213 |
-
|
214 |
-
mult = 2 ** n_downsampling
|
215 |
-
feats_num_bottleneck = min(max_features, ngf * mult)
|
216 |
-
|
217 |
-
### resnet blocks
|
218 |
-
for i in range(n_blocks):
|
219 |
-
if ffc_positions is not None and i in ffc_positions:
|
220 |
-
model += [FFCResnetBlock(feats_num_bottleneck, padding_type, norm_layer, activation_layer=nn.ReLU,
|
221 |
-
inline=True, **ffc_kwargs)]
|
222 |
-
model += [MultidilatedResnetBlock(feats_num_bottleneck, padding_type=padding_type,
|
223 |
-
conv_layer=resnet_conv_layer, activation=activation,
|
224 |
-
norm_layer=norm_layer)]
|
225 |
-
|
226 |
-
### upsample
|
227 |
-
for i in range(n_downsampling):
|
228 |
-
mult = 2 ** (n_downsampling - i)
|
229 |
-
model += deconv_factory(deconv_kind, ngf, mult, up_norm_layer, up_activation, max_features)
|
230 |
-
model += [nn.ReflectionPad2d(3),
|
231 |
-
nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]
|
232 |
-
if add_out_act:
|
233 |
-
model.append(get_activation('tanh' if add_out_act is True else add_out_act))
|
234 |
-
self.model = nn.Sequential(*model)
|
235 |
-
|
236 |
-
def forward(self, input):
|
237 |
-
return self.model(input)
|
238 |
-
|
239 |
-
class ConfigGlobalGenerator(nn.Module):
|
240 |
-
def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3,
|
241 |
-
n_blocks=3, norm_layer=nn.BatchNorm2d,
|
242 |
-
padding_type='reflect', conv_kind='default',
|
243 |
-
deconv_kind='convtranspose', activation=nn.ReLU(True),
|
244 |
-
up_norm_layer=nn.BatchNorm2d, affine=None, up_activation=nn.ReLU(True),
|
245 |
-
add_out_act=True, max_features=1024,
|
246 |
-
manual_block_spec=[],
|
247 |
-
resnet_block_kind='multidilatedresnetblock',
|
248 |
-
resnet_conv_kind='multidilated',
|
249 |
-
resnet_dilation=1,
|
250 |
-
multidilation_kwargs={}):
|
251 |
-
assert (n_blocks >= 0)
|
252 |
-
super().__init__()
|
253 |
-
|
254 |
-
conv_layer = get_conv_block_ctor(conv_kind)
|
255 |
-
resnet_conv_layer = functools.partial(get_conv_block_ctor(resnet_conv_kind), **multidilation_kwargs)
|
256 |
-
norm_layer = get_norm_layer(norm_layer)
|
257 |
-
if affine is not None:
|
258 |
-
norm_layer = partial(norm_layer, affine=affine)
|
259 |
-
up_norm_layer = get_norm_layer(up_norm_layer)
|
260 |
-
if affine is not None:
|
261 |
-
up_norm_layer = partial(up_norm_layer, affine=affine)
|
262 |
-
|
263 |
-
model = [nn.ReflectionPad2d(3),
|
264 |
-
conv_layer(input_nc, ngf, kernel_size=7, padding=0),
|
265 |
-
norm_layer(ngf),
|
266 |
-
activation]
|
267 |
-
|
268 |
-
identity = Identity()
|
269 |
-
|
270 |
-
### downsample
|
271 |
-
for i in range(n_downsampling):
|
272 |
-
mult = 2 ** i
|
273 |
-
model += [conv_layer(min(max_features, ngf * mult),
|
274 |
-
min(max_features, ngf * mult * 2),
|
275 |
-
kernel_size=3, stride=2, padding=1),
|
276 |
-
norm_layer(min(max_features, ngf * mult * 2)),
|
277 |
-
activation]
|
278 |
-
|
279 |
-
mult = 2 ** n_downsampling
|
280 |
-
feats_num_bottleneck = min(max_features, ngf * mult)
|
281 |
-
|
282 |
-
if len(manual_block_spec) == 0:
|
283 |
-
manual_block_spec = [
|
284 |
-
DotDict(lambda : None, {
|
285 |
-
'n_blocks': n_blocks,
|
286 |
-
'use_default': True})
|
287 |
-
]
|
288 |
-
|
289 |
-
### resnet blocks
|
290 |
-
for block_spec in manual_block_spec:
|
291 |
-
def make_and_add_blocks(model, block_spec):
|
292 |
-
block_spec = DotDict(lambda : None, block_spec)
|
293 |
-
if not block_spec.use_default:
|
294 |
-
resnet_conv_layer = functools.partial(get_conv_block_ctor(block_spec.resnet_conv_kind), **block_spec.multidilation_kwargs)
|
295 |
-
resnet_conv_kind = block_spec.resnet_conv_kind
|
296 |
-
resnet_block_kind = block_spec.resnet_block_kind
|
297 |
-
if block_spec.resnet_dilation is not None:
|
298 |
-
resnet_dilation = block_spec.resnet_dilation
|
299 |
-
for i in range(block_spec.n_blocks):
|
300 |
-
if resnet_block_kind == "multidilatedresnetblock":
|
301 |
-
model += [MultidilatedResnetBlock(feats_num_bottleneck, padding_type=padding_type,
|
302 |
-
conv_layer=resnet_conv_layer, activation=activation,
|
303 |
-
norm_layer=norm_layer)]
|
304 |
-
if resnet_block_kind == "resnetblock":
|
305 |
-
model += [ResnetBlock(ngf * mult, padding_type=padding_type, activation=activation, norm_layer=norm_layer,
|
306 |
-
conv_kind=resnet_conv_kind)]
|
307 |
-
if resnet_block_kind == "resnetblock5x5":
|
308 |
-
model += [ResnetBlock5x5(ngf * mult, padding_type=padding_type, activation=activation, norm_layer=norm_layer,
|
309 |
-
conv_kind=resnet_conv_kind)]
|
310 |
-
if resnet_block_kind == "resnetblockdwdil":
|
311 |
-
model += [ResnetBlock(ngf * mult, padding_type=padding_type, activation=activation, norm_layer=norm_layer,
|
312 |
-
conv_kind=resnet_conv_kind, dilation=resnet_dilation, second_dilation=resnet_dilation)]
|
313 |
-
make_and_add_blocks(model, block_spec)
|
314 |
-
|
315 |
-
### upsample
|
316 |
-
for i in range(n_downsampling):
|
317 |
-
mult = 2 ** (n_downsampling - i)
|
318 |
-
model += deconv_factory(deconv_kind, ngf, mult, up_norm_layer, up_activation, max_features)
|
319 |
-
model += [nn.ReflectionPad2d(3),
|
320 |
-
nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]
|
321 |
-
if add_out_act:
|
322 |
-
model.append(get_activation('tanh' if add_out_act is True else add_out_act))
|
323 |
-
self.model = nn.Sequential(*model)
|
324 |
-
|
325 |
-
def forward(self, input):
|
326 |
-
return self.model(input)
|
327 |
-
|
328 |
-
|
329 |
-
def make_dil_blocks(dilated_blocks_n, dilation_block_kind, dilated_block_kwargs):
|
330 |
-
blocks = []
|
331 |
-
for i in range(dilated_blocks_n):
|
332 |
-
if dilation_block_kind == 'simple':
|
333 |
-
blocks.append(ResnetBlock(**dilated_block_kwargs, dilation=2 ** (i + 1)))
|
334 |
-
elif dilation_block_kind == 'multi':
|
335 |
-
blocks.append(MultidilatedResnetBlock(**dilated_block_kwargs))
|
336 |
-
else:
|
337 |
-
raise ValueError(f'dilation_block_kind could not be "{dilation_block_kind}"')
|
338 |
-
return blocks
|
339 |
-
|
340 |
-
-class GlobalGenerator(nn.Module):
-    def __init__(self, input_nc, output_nc, ngf=64, n_downsampling=3, n_blocks=9, norm_layer=nn.BatchNorm2d,
-                 padding_type='reflect', conv_kind='default', activation=nn.ReLU(True),
-                 up_norm_layer=nn.BatchNorm2d, affine=None,
-                 up_activation=nn.ReLU(True), dilated_blocks_n=0, dilated_blocks_n_start=0,
-                 dilated_blocks_n_middle=0,
-                 add_out_act=True,
-                 max_features=1024, is_resblock_depthwise=False,
-                 ffc_positions=None, ffc_kwargs={}, dilation=1, second_dilation=None,
-                 dilation_block_kind='simple', multidilation_kwargs={}):
-        assert (n_blocks >= 0)
-        super().__init__()
-
-        conv_layer = get_conv_block_ctor(conv_kind)
-        norm_layer = get_norm_layer(norm_layer)
-        if affine is not None:
-            norm_layer = partial(norm_layer, affine=affine)
-        up_norm_layer = get_norm_layer(up_norm_layer)
-        if affine is not None:
-            up_norm_layer = partial(up_norm_layer, affine=affine)
-
-        if ffc_positions is not None:
-            ffc_positions = collections.Counter(ffc_positions)
-
-        model = [nn.ReflectionPad2d(3),
-                 conv_layer(input_nc, ngf, kernel_size=7, padding=0),
-                 norm_layer(ngf),
-                 activation]
-
-        identity = Identity()
-        ### downsample
-        for i in range(n_downsampling):
-            mult = 2 ** i
-
-            model += [conv_layer(min(max_features, ngf * mult),
-                                 min(max_features, ngf * mult * 2),
-                                 kernel_size=3, stride=2, padding=1),
-                      norm_layer(min(max_features, ngf * mult * 2)),
-                      activation]
-
-        mult = 2 ** n_downsampling
-        feats_num_bottleneck = min(max_features, ngf * mult)
-
-        dilated_block_kwargs = dict(dim=feats_num_bottleneck, padding_type=padding_type,
-                                    activation=activation, norm_layer=norm_layer)
-        if dilation_block_kind == 'simple':
-            dilated_block_kwargs['conv_kind'] = conv_kind
-        elif dilation_block_kind == 'multi':
-            dilated_block_kwargs['conv_layer'] = functools.partial(
-                get_conv_block_ctor('multidilated'), **multidilation_kwargs)
-
-        # dilated blocks at the start of the bottleneck sausage
-        if dilated_blocks_n_start is not None and dilated_blocks_n_start > 0:
-            model += make_dil_blocks(dilated_blocks_n_start, dilation_block_kind, dilated_block_kwargs)
-
-        # resnet blocks
-        for i in range(n_blocks):
-            # dilated blocks at the middle of the bottleneck sausage
-            if i == n_blocks // 2 and dilated_blocks_n_middle is not None and dilated_blocks_n_middle > 0:
-                model += make_dil_blocks(dilated_blocks_n_middle, dilation_block_kind, dilated_block_kwargs)
-
-            if ffc_positions is not None and i in ffc_positions:
-                for _ in range(ffc_positions[i]):  # same position can occur more than once
-                    model += [FFCResnetBlock(feats_num_bottleneck, padding_type, norm_layer, activation_layer=nn.ReLU,
-                                             inline=True, **ffc_kwargs)]
-
-            if is_resblock_depthwise:
-                resblock_groups = feats_num_bottleneck
-            else:
-                resblock_groups = 1
-
-            model += [ResnetBlock(feats_num_bottleneck, padding_type=padding_type, activation=activation,
-                                  norm_layer=norm_layer, conv_kind=conv_kind, groups=resblock_groups,
-                                  dilation=dilation, second_dilation=second_dilation)]
-
-
-        # dilated blocks at the end of the bottleneck sausage
-        if dilated_blocks_n is not None and dilated_blocks_n > 0:
-            model += make_dil_blocks(dilated_blocks_n, dilation_block_kind, dilated_block_kwargs)
-
-        # upsample
-        for i in range(n_downsampling):
-            mult = 2 ** (n_downsampling - i)
-            model += [nn.ConvTranspose2d(min(max_features, ngf * mult),
-                                         min(max_features, int(ngf * mult / 2)),
-                                         kernel_size=3, stride=2, padding=1, output_padding=1),
-                      up_norm_layer(min(max_features, int(ngf * mult / 2))),
-                      up_activation]
-        model += [nn.ReflectionPad2d(3),
-                  nn.Conv2d(ngf, output_nc, kernel_size=7, padding=0)]
-        if add_out_act:
-            model.append(get_activation('tanh' if add_out_act is True else add_out_act))
-        self.model = nn.Sequential(*model)
-
-    def forward(self, input):
-        return self.model(input)
-
-
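With the defaults above, every stride-2 downsampling conv is mirrored by a stride-2 transposed conv (kernel 3, padding 1, output_padding 1 exactly doubles the spatial size), so the output resolution matches the input. A hypothetical smoke test, assuming torch and this module's helpers are importable; the input size and channel counts here are made up:

import torch

# Hypothetical instantiation; e.g. RGB + mask in, RGB out.
gen = GlobalGenerator(input_nc=4, output_nc=3)
x = torch.randn(1, 4, 256, 256)
with torch.no_grad():
    y = gen(x)
print(y.shape)  # expected: torch.Size([1, 3, 256, 256]); spatial size is preserved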
-class GlobalGeneratorGated(GlobalGenerator):
-    def __init__(self, *args, **kwargs):
-        real_kwargs=dict(
-            conv_kind='gated_bn_relu',
-            activation=nn.Identity(),
-            norm_layer=nn.Identity
-        )
-        real_kwargs.update(kwargs)
-        super().__init__(*args, **real_kwargs)
-
-
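GlobalGeneratorGated only bakes gated-convolution defaults into GlobalGenerator; because update() runs last, caller-supplied kwargs still win. A hedged usage sketch, assuming the 'gated_bn_relu' conv kind resolves in the surrounding module:

gen = GlobalGeneratorGated(input_nc=4, output_nc=3)    # gated convs, identity norm/activation
gen_plain = GlobalGeneratorGated(input_nc=4, output_nc=3,
                                 conv_kind='default')  # caller override takes precedence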
-class GlobalGeneratorFromSuperChannels(nn.Module):
-    def __init__(self, input_nc, output_nc, n_downsampling, n_blocks, super_channels, norm_layer="bn", padding_type='reflect', add_out_act=True):
-        super().__init__()
-        self.n_downsampling = n_downsampling
-        norm_layer = get_norm_layer(norm_layer)
-        if type(norm_layer) == functools.partial:
-            use_bias = (norm_layer.func == nn.InstanceNorm2d)
-        else:
-            use_bias = (norm_layer == nn.InstanceNorm2d)
-
-        channels = self.convert_super_channels(super_channels)
-        self.channels = channels
-
-        model = [nn.ReflectionPad2d(3),
-                 nn.Conv2d(input_nc, channels[0], kernel_size=7, padding=0, bias=use_bias),
-                 norm_layer(channels[0]),
-                 nn.ReLU(True)]
-
-        for i in range(n_downsampling):  # add downsampling layers
-            mult = 2 ** i
-            model += [nn.Conv2d(channels[0+i], channels[1+i], kernel_size=3, stride=2, padding=1, bias=use_bias),
-                      norm_layer(channels[1+i]),
-                      nn.ReLU(True)]
-
-        mult = 2 ** n_downsampling
-
-        n_blocks1 = n_blocks // 3
-        n_blocks2 = n_blocks1
-        n_blocks3 = n_blocks - n_blocks1 - n_blocks2
-
-        for i in range(n_blocks1):
-            c = n_downsampling
-            dim = channels[c]
-            model += [ResnetBlock(dim, padding_type=padding_type, norm_layer=norm_layer)]
-
-        for i in range(n_blocks2):
-            c = n_downsampling+1
-            dim = channels[c]
-            kwargs = {}
-            if i == 0:
-                kwargs = {"in_dim": channels[c-1]}
-            model += [ResnetBlock(dim, padding_type=padding_type, norm_layer=norm_layer, **kwargs)]
-
-        for i in range(n_blocks3):
-            c = n_downsampling+2
-            dim = channels[c]
-            kwargs = {}
-            if i == 0:
-                kwargs = {"in_dim": channels[c-1]}
-            model += [ResnetBlock(dim, padding_type=padding_type, norm_layer=norm_layer, **kwargs)]
-
-        for i in range(n_downsampling):  # add upsampling layers
-            mult = 2 ** (n_downsampling - i)
-            model += [nn.ConvTranspose2d(channels[n_downsampling+3+i],
-                                         channels[n_downsampling+3+i+1],
-                                         kernel_size=3, stride=2,
-                                         padding=1, output_padding=1,
-                                         bias=use_bias),
-                      norm_layer(channels[n_downsampling+3+i+1]),
-                      nn.ReLU(True)]
-        model += [nn.ReflectionPad2d(3)]
-        model += [nn.Conv2d(channels[2*n_downsampling+3], output_nc, kernel_size=7, padding=0)]
-
-        if add_out_act:
-            model.append(get_activation('tanh' if add_out_act is True else add_out_act))
-        self.model = nn.Sequential(*model)
-
-    def convert_super_channels(self, super_channels):
-        n_downsampling = self.n_downsampling
-        result = []
-        cnt = 0
-
-        if n_downsampling == 2:
-            N1 = 10
-        elif n_downsampling == 3:
-            N1 = 13
-        else:
-            raise NotImplementedError
-
-        for i in range(0, N1):
-            if i in [1,4,7,10]:
-                channel = super_channels[cnt] * (2 ** cnt)
-                config = {'channel': channel}
-                result.append(channel)
-                logging.info(f"Downsample channels {result[-1]}")
-                cnt += 1
-
-        for i in range(3):
-            for counter, j in enumerate(range(N1 + i * 3, N1 + 3 + i * 3)):
-                if len(super_channels) == 6:
-                    channel = super_channels[3] * 4
-                else:
-                    channel = super_channels[i + 3] * 4
-                config = {'channel': channel}
-                if counter == 0:
-                    result.append(channel)
-                    logging.info(f"Bottleneck channels {result[-1]}")
-        cnt = 2
-
-        for i in range(N1+9, N1+21):
-            if i in [22, 25,28]:
-                cnt -= 1
-                if len(super_channels) == 6:
-                    channel = super_channels[5 - cnt] * (2 ** cnt)
-                else:
-                    channel = super_channels[7 - cnt] * (2 ** cnt)
-                result.append(int(channel))
-                logging.info(f"Upsample channels {result[-1]}")
-        return result
-
-    def forward(self, input):
-        return self.model(input)
-
-
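Note on the bottleneck layout above: the n_blocks residual blocks are split into three channel stages, and the first block of stages two and three receives the previous stage's width via in_dim so the channel count can change between stages. The split arithmetic is verifiable standalone:

# Three-way split of the bottleneck depth, as computed above.
n_blocks = 9
n_blocks1 = n_blocks // 3
n_blocks2 = n_blocks1
n_blocks3 = n_blocks - n_blocks1 - n_blocks2
print(n_blocks1, n_blocks2, n_blocks3)  # 3 3 3  (a non-multiple of 3 pads the last stage)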
-# Defines the PatchGAN discriminator with the specified arguments.
-class NLayerDiscriminator(BaseDiscriminator):
-    def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d,):
-        super().__init__()
-        self.n_layers = n_layers
-
-        kw = 4
-        padw = int(np.ceil((kw-1.0)/2))
-        sequence = [[nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
-                     nn.LeakyReLU(0.2, True)]]
-
-        nf = ndf
-        for n in range(1, n_layers):
-            nf_prev = nf
-            nf = min(nf * 2, 512)
-
-            cur_model = []
-            cur_model += [
-                nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=2, padding=padw),
-                norm_layer(nf),
-                nn.LeakyReLU(0.2, True)
-            ]
-            sequence.append(cur_model)
-
-        nf_prev = nf
-        nf = min(nf * 2, 512)
-
-        cur_model = []
-        cur_model += [
-            nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw),
-            norm_layer(nf),
-            nn.LeakyReLU(0.2, True)
-        ]
-        sequence.append(cur_model)
-
-        sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw)]]
-
-        for n in range(len(sequence)):
-            setattr(self, 'model'+str(n), nn.Sequential(*sequence[n]))
-
-    def get_all_activations(self, x):
-        res = [x]
-        for n in range(self.n_layers + 2):
-            model = getattr(self, 'model' + str(n))
-            res.append(model(res[-1]))
-        return res[1:]
-
-    def forward(self, x):
-        act = self.get_all_activations(x)
-        return act[-1], act[:-1]
-
-
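forward above returns the final 1-channel per-patch logit map plus every intermediate activation, which is the shape feature-matching losses expect. A hypothetical usage sketch, assuming torch and the classes from this (deleted) module are importable; sizes are made up:

import torch

disc = NLayerDiscriminator(input_nc=3, ndf=64, n_layers=3)
x = torch.randn(1, 3, 256, 256)
logits, feats = disc(x)               # per-patch scores + intermediate feature maps
print(logits.shape)                   # 1-channel map, downsampled by the strided convs
print([f.shape[1] for f in feats])    # channel widths of the intermediate activations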
-class MultidilatedNLayerDiscriminator(BaseDiscriminator):
-    def __init__(self, input_nc, ndf=64, n_layers=3, norm_layer=nn.BatchNorm2d, multidilation_kwargs={}):
-        super().__init__()
-        self.n_layers = n_layers
-
-        kw = 4
-        padw = int(np.ceil((kw-1.0)/2))
-        sequence = [[nn.Conv2d(input_nc, ndf, kernel_size=kw, stride=2, padding=padw),
-                     nn.LeakyReLU(0.2, True)]]
-
-        nf = ndf
-        for n in range(1, n_layers):
-            nf_prev = nf
-            nf = min(nf * 2, 512)
-
-            cur_model = []
-            cur_model += [
-                MultidilatedConv(nf_prev, nf, kernel_size=kw, stride=2, padding=[2, 3], **multidilation_kwargs),
-                norm_layer(nf),
-                nn.LeakyReLU(0.2, True)
-            ]
-            sequence.append(cur_model)
-
-        nf_prev = nf
-        nf = min(nf * 2, 512)
-
-        cur_model = []
-        cur_model += [
-            nn.Conv2d(nf_prev, nf, kernel_size=kw, stride=1, padding=padw),
-            norm_layer(nf),
-            nn.LeakyReLU(0.2, True)
-        ]
-        sequence.append(cur_model)
-
-        sequence += [[nn.Conv2d(nf, 1, kernel_size=kw, stride=1, padding=padw)]]
-
-        for n in range(len(sequence)):
-            setattr(self, 'model'+str(n), nn.Sequential(*sequence[n]))
-
-    def get_all_activations(self, x):
-        res = [x]
-        for n in range(self.n_layers + 2):
-            model = getattr(self, 'model' + str(n))
-            res.append(model(res[-1]))
-        return res[1:]
-
-    def forward(self, x):
-        act = self.get_all_activations(x)
-        return act[-1], act[:-1]
-
-
-class NLayerDiscriminatorAsGen(NLayerDiscriminator):
-    def forward(self, x):
-        return super().forward(x)[0]

spaces/CVPR/regionclip-demo/datasets/prepare_panoptic_fpn.py
DELETED
@@ -1,116 +0,0 @@
-#!/usr/bin/env python3
-# -*- coding: utf-8 -*-
-# Copyright (c) Facebook, Inc. and its affiliates.
-
-import functools
-import json
-import multiprocessing as mp
-import numpy as np
-import os
-import time
-from fvcore.common.download import download
-from panopticapi.utils import rgb2id
-from PIL import Image
-
-from detectron2.data.datasets.builtin_meta import COCO_CATEGORIES
-
-
-def _process_panoptic_to_semantic(input_panoptic, output_semantic, segments, id_map):
-    panoptic = np.asarray(Image.open(input_panoptic), dtype=np.uint32)
-    panoptic = rgb2id(panoptic)
-    output = np.zeros_like(panoptic, dtype=np.uint8) + 255
-    for seg in segments:
-        cat_id = seg["category_id"]
-        new_cat_id = id_map[cat_id]
-        output[panoptic == seg["id"]] = new_cat_id
-    Image.fromarray(output).save(output_semantic)
-
-
-def separate_coco_semantic_from_panoptic(panoptic_json, panoptic_root, sem_seg_root, categories):
-    """
-    Create semantic segmentation annotations from panoptic segmentation
-    annotations, to be used by PanopticFPN.
-
-    It maps all thing categories to class 0, and maps all unlabeled pixels to class 255.
-    It maps all stuff categories to contiguous ids starting from 1.
-
-    Args:
-        panoptic_json (str): path to the panoptic json file, in COCO's format.
-        panoptic_root (str): a directory with panoptic annotation files, in COCO's format.
-        sem_seg_root (str): a directory to output semantic annotation files
-        categories (list[dict]): category metadata. Each dict needs to have:
-            "id": corresponds to the "category_id" in the json annotations
-            "isthing": 0 or 1
-    """
-    os.makedirs(sem_seg_root, exist_ok=True)
-
-    stuff_ids = [k["id"] for k in categories if k["isthing"] == 0]
-    thing_ids = [k["id"] for k in categories if k["isthing"] == 1]
-    id_map = {}  # map from category id to id in the output semantic annotation
-    assert len(stuff_ids) <= 254
-    for i, stuff_id in enumerate(stuff_ids):
-        id_map[stuff_id] = i + 1
-    for thing_id in thing_ids:
-        id_map[thing_id] = 0
-    id_map[0] = 255
-
-    with open(panoptic_json) as f:
-        obj = json.load(f)
-
-    pool = mp.Pool(processes=max(mp.cpu_count() // 2, 4))
-
-    def iter_annotations():
-        for anno in obj["annotations"]:
-            file_name = anno["file_name"]
-            segments = anno["segments_info"]
-            input = os.path.join(panoptic_root, file_name)
-            output = os.path.join(sem_seg_root, file_name)
-            yield input, output, segments
-
-    print("Start writing to {} ...".format(sem_seg_root))
-    start = time.time()
-    pool.starmap(
-        functools.partial(_process_panoptic_to_semantic, id_map=id_map),
-        iter_annotations(),
-        chunksize=100,
-    )
-    print("Finished. time: {:.2f}s".format(time.time() - start))
-
-
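The resulting id_map sends stuff ids to contiguous labels starting at 1, every thing id to class 0, and the unlabeled id 0 to 255. A toy illustration with made-up category ids:

# Toy re-run of the id_map construction above (category ids are examples only).
categories = [
    {"id": 1, "isthing": 1},   # a thing category
    {"id": 84, "isthing": 0},  # a stuff category
    {"id": 93, "isthing": 0},  # another stuff category
]
stuff_ids = [k["id"] for k in categories if k["isthing"] == 0]
thing_ids = [k["id"] for k in categories if k["isthing"] == 1]
id_map = {stuff_id: i + 1 for i, stuff_id in enumerate(stuff_ids)}
id_map.update({thing_id: 0 for thing_id in thing_ids})
id_map[0] = 255
print(id_map)  # {84: 1, 93: 2, 1: 0, 0: 255}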
-if __name__ == "__main__":
-    dataset_dir = os.path.join(os.getenv("DETECTRON2_DATASETS", "datasets"), "coco")
-    for s in ["val2017", "train2017"]:
-        separate_coco_semantic_from_panoptic(
-            os.path.join(dataset_dir, "annotations/panoptic_{}.json".format(s)),
-            os.path.join(dataset_dir, "panoptic_{}".format(s)),
-            os.path.join(dataset_dir, "panoptic_stuff_{}".format(s)),
-            COCO_CATEGORIES,
-        )
-
-    # Prepare val2017_100 for quick testing:
-
-    dest_dir = os.path.join(dataset_dir, "annotations/")
-    URL_PREFIX = "https://dl.fbaipublicfiles.com/detectron2/"
-    download(URL_PREFIX + "annotations/coco/panoptic_val2017_100.json", dest_dir)
-    with open(os.path.join(dest_dir, "panoptic_val2017_100.json")) as f:
-        obj = json.load(f)
-
-    def link_val100(dir_full, dir_100):
-        print("Creating " + dir_100 + " ...")
-        os.makedirs(dir_100, exist_ok=True)
-        for img in obj["images"]:
-            basename = os.path.splitext(img["file_name"])[0]
-            src = os.path.join(dir_full, basename + ".png")
-            dst = os.path.join(dir_100, basename + ".png")
-            src = os.path.relpath(src, start=dir_100)
-            os.symlink(src, dst)
-
-    link_val100(
-        os.path.join(dataset_dir, "panoptic_val2017"),
-        os.path.join(dataset_dir, "panoptic_val2017_100"),
-    )
-
-    link_val100(
-        os.path.join(dataset_dir, "panoptic_stuff_val2017"),
-        os.path.join(dataset_dir, "panoptic_stuff_val2017_100"),
-    )
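The __main__ block above assumes the standard detectron2 layout under $DETECTRON2_DATASETS. To run the conversion for a single split directly, one could call the function as a sketch below; the paths mirror the script's own conventions:

# Hypothetical one-off invocation for a single split (paths are examples only).
import os
from detectron2.data.datasets.builtin_meta import COCO_CATEGORIES

dataset_dir = os.path.join(os.getenv("DETECTRON2_DATASETS", "datasets"), "coco")
separate_coco_semantic_from_panoptic(
    os.path.join(dataset_dir, "annotations/panoptic_val2017.json"),
    os.path.join(dataset_dir, "panoptic_val2017"),
    os.path.join(dataset_dir, "panoptic_stuff_val2017"),
    COCO_CATEGORIES,
)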