Commit
·
5a41252
1
Parent(s):
658a70c
Update parquet files (step 39 of 397)
Browse filesThis view is limited to 50 files because it contains too many changes.
See raw diff
- spaces/123Kumar/vits-uma-genshin-honkai123/utils.py +0 -225
- spaces/1acneusushi/gradio-2dmoleculeeditor/data/HD Online Player (Dil Chahta Hai 2001 720p BluRay NHD ) - Watch the Cult Classic Comedy-Drama Film.md +0 -80
- spaces/1gistliPinn/ChatGPT4/Examples/DocuWorks 7 0 Full Version.zip LINK.md +0 -6
- spaces/1gistliPinn/ChatGPT4/Examples/EasyWorship Crack 7.1.4.0 Latest Version With 2020 Keygen EXCLUSIVE.md +0 -10
- spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download APKs from Huawei AppGallery The Official App Store for Huawei Devices.md +0 -116
- spaces/1phancelerku/anime-remove-background/4x4 Off Road Rally 9 MOD APK Download and Enjoy the Ultimate Racing Experience.md +0 -99
- spaces/1phancelerku/anime-remove-background/Download Countries of the World Map with Customizable Colors and Labels.md +0 -139
- spaces/1phancelerku/anime-remove-background/Download Facebook Lite APK for Android The Latest Version of the Fast and Efficient Social Network.md +0 -125
- spaces/1phancelerku/anime-remove-background/Download Lokicraft 1.17 with Lokicraft Helper A Guide to the New Features and Mods.md +0 -163
- spaces/1phancelerku/anime-remove-background/Extreme Live VPN The Ultimate VPN App for Android.md +0 -149
- spaces/1phancelerku/anime-remove-background/Farm Heroes Saga MOD APK How to Get Unlimited Everything and Connect with Facebook Friends.md +0 -160
- spaces/4Taps/SadTalker/src/face3d/models/arcface_torch/configs/__init__.py +0 -0
- spaces/4Taps/SadTalker/src/face3d/models/networks.py +0 -521
- spaces/AIFILMS/generate_human_motion/pyrender/docs/make.bat +0 -35
- spaces/AIGText/GlyphControl/scripts/rendertext_tool.py +0 -206
- spaces/Abhilashvj/planogram-compliance/utils/plots.py +0 -781
- spaces/AchyuthGamer/Free-Accounts-Generator/README.md +0 -11
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/scale.d.ts +0 -2
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/spinner/pie/Pie.d.ts +0 -2
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/basesizer/utils/CheckSize.js +0 -12
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/imagebox/Factory.d.ts +0 -7
- spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/perspectivecard/Factory.d.ts +0 -5
- spaces/Aki004/herta-so-vits/hubert/__init__.py +0 -0
- spaces/Alcedo/yunmedia/README.md +0 -10
- spaces/Allakhazam/anythingV4/app.py +0 -26
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/docs/source/ko/using-diffusers/reusing_seeds.md +0 -63
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/examples/community/interpolate_stable_diffusion.py +0 -524
- spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/pipelines/vq_diffusion/__init__.py +0 -10
- spaces/Andy1621/uniformer_image_segmentation/configs/fcn/fcn_r101-d8_512x1024_40k_cityscapes.py +0 -2
- spaces/Andy1621/uniformer_image_segmentation/configs/hrnet/fcn_hr18s_480x480_40k_pascal_context_59.py +0 -9
- spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/openai/cache_embedding_model.py +0 -11
- spaces/AnishKumbhar/ChatBot/text-generation-webui-main/start_windows.bat +0 -84
- spaces/ArchitSharma/Digital-Photo-Color-Restoration/README.md +0 -13
- spaces/AriaMei/TTSdemo/text/cleaners.py +0 -177
- spaces/Ariharasudhan/YoloV5/utils/aws/userdata.sh +0 -27
- spaces/AtlasUnified/DeforumPromptGenerator/app.py +0 -33
- spaces/BartPoint/VoiceChange/util.py +0 -81
- spaces/Benson/text-generation/Examples/Descargar Apk Mod Hello Neighbor.md +0 -63
- spaces/CVPR/LIVE/thrust/thrust/iterator/transform_iterator.h +0 -356
- spaces/CVPR/LIVE/thrust/thrust/system/tbb/detail/transform_scan.h +0 -23
- spaces/CVPR/WALT/mmdet/models/backbones/resnest.py +0 -317
- spaces/CVPR/regionclip-demo/detectron2/checkpoint/clip_model_loading.py +0 -415
- spaces/Caoyunkang/Segment-Any-Anomaly/GroundingDINO/groundingdino/util/vl_utils.py +0 -100
- spaces/Chris4K/llms_compare/Hackintosh MacOS Niresh High Sierra For Intel And AMD ? MacOS.md +0 -128
- spaces/Cletrason/Cletrason-toad-mario-movie/app.py +0 -3
- spaces/Cong723/gpt-academic-public/toolbox.py +0 -717
- spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/altair/utils/schemapi.py +0 -1126
- spaces/Datasculptor/StyleGAN-NADA/e4e/configs/paths_config.py +0 -28
- spaces/ECCV2022/PSG/OpenPSG/configs/_base_/models/detr4seg_r101_psg.py +0 -137
- spaces/Eddycrack864/Applio-Inference/infer/modules/train/preprocess.py +0 -147
spaces/123Kumar/vits-uma-genshin-honkai123/utils.py
DELETED
@@ -1,225 +0,0 @@
|
|
1 |
-
import os
|
2 |
-
import sys
|
3 |
-
import argparse
|
4 |
-
import logging
|
5 |
-
import json
|
6 |
-
import subprocess
|
7 |
-
import numpy as np
|
8 |
-
import librosa
|
9 |
-
import torch
|
10 |
-
|
11 |
-
MATPLOTLIB_FLAG = False
|
12 |
-
|
13 |
-
logging.basicConfig(stream=sys.stdout, level=logging.DEBUG)
|
14 |
-
logger = logging
|
15 |
-
|
16 |
-
|
17 |
-
def load_checkpoint(checkpoint_path, model, optimizer=None):
|
18 |
-
assert os.path.isfile(checkpoint_path)
|
19 |
-
checkpoint_dict = torch.load(checkpoint_path, map_location='cpu')
|
20 |
-
iteration = checkpoint_dict['iteration']
|
21 |
-
learning_rate = checkpoint_dict['learning_rate']
|
22 |
-
if optimizer is not None:
|
23 |
-
optimizer.load_state_dict(checkpoint_dict['optimizer'])
|
24 |
-
saved_state_dict = checkpoint_dict['model']
|
25 |
-
if hasattr(model, 'module'):
|
26 |
-
state_dict = model.module.state_dict()
|
27 |
-
else:
|
28 |
-
state_dict = model.state_dict()
|
29 |
-
new_state_dict= {}
|
30 |
-
for k, v in state_dict.items():
|
31 |
-
try:
|
32 |
-
new_state_dict[k] = saved_state_dict[k]
|
33 |
-
except:
|
34 |
-
logger.info("%s is not in the checkpoint" % k)
|
35 |
-
new_state_dict[k] = v
|
36 |
-
if hasattr(model, 'module'):
|
37 |
-
model.module.load_state_dict(new_state_dict)
|
38 |
-
else:
|
39 |
-
model.load_state_dict(new_state_dict)
|
40 |
-
logger.info("Loaded checkpoint '{}' (iteration {})" .format(
|
41 |
-
checkpoint_path, iteration))
|
42 |
-
return model, optimizer, learning_rate, iteration
|
43 |
-
|
44 |
-
|
45 |
-
def plot_spectrogram_to_numpy(spectrogram):
|
46 |
-
global MATPLOTLIB_FLAG
|
47 |
-
if not MATPLOTLIB_FLAG:
|
48 |
-
import matplotlib
|
49 |
-
matplotlib.use("Agg")
|
50 |
-
MATPLOTLIB_FLAG = True
|
51 |
-
mpl_logger = logging.getLogger('matplotlib')
|
52 |
-
mpl_logger.setLevel(logging.WARNING)
|
53 |
-
import matplotlib.pylab as plt
|
54 |
-
import numpy as np
|
55 |
-
|
56 |
-
fig, ax = plt.subplots(figsize=(10,2))
|
57 |
-
im = ax.imshow(spectrogram, aspect="auto", origin="lower",
|
58 |
-
interpolation='none')
|
59 |
-
plt.colorbar(im, ax=ax)
|
60 |
-
plt.xlabel("Frames")
|
61 |
-
plt.ylabel("Channels")
|
62 |
-
plt.tight_layout()
|
63 |
-
|
64 |
-
fig.canvas.draw()
|
65 |
-
data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
|
66 |
-
data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,))
|
67 |
-
plt.close()
|
68 |
-
return data
|
69 |
-
|
70 |
-
|
71 |
-
def plot_alignment_to_numpy(alignment, info=None):
|
72 |
-
global MATPLOTLIB_FLAG
|
73 |
-
if not MATPLOTLIB_FLAG:
|
74 |
-
import matplotlib
|
75 |
-
matplotlib.use("Agg")
|
76 |
-
MATPLOTLIB_FLAG = True
|
77 |
-
mpl_logger = logging.getLogger('matplotlib')
|
78 |
-
mpl_logger.setLevel(logging.WARNING)
|
79 |
-
import matplotlib.pylab as plt
|
80 |
-
import numpy as np
|
81 |
-
|
82 |
-
fig, ax = plt.subplots(figsize=(6, 4))
|
83 |
-
im = ax.imshow(alignment.transpose(), aspect='auto', origin='lower',
|
84 |
-
interpolation='none')
|
85 |
-
fig.colorbar(im, ax=ax)
|
86 |
-
xlabel = 'Decoder timestep'
|
87 |
-
if info is not None:
|
88 |
-
xlabel += '\n\n' + info
|
89 |
-
plt.xlabel(xlabel)
|
90 |
-
plt.ylabel('Encoder timestep')
|
91 |
-
plt.tight_layout()
|
92 |
-
|
93 |
-
fig.canvas.draw()
|
94 |
-
data = np.fromstring(fig.canvas.tostring_rgb(), dtype=np.uint8, sep='')
|
95 |
-
data = data.reshape(fig.canvas.get_width_height()[::-1] + (3,))
|
96 |
-
plt.close()
|
97 |
-
return data
|
98 |
-
|
99 |
-
|
100 |
-
def load_audio_to_torch(full_path, target_sampling_rate):
|
101 |
-
audio, sampling_rate = librosa.load(full_path, sr=target_sampling_rate, mono=True)
|
102 |
-
return torch.FloatTensor(audio.astype(np.float32))
|
103 |
-
|
104 |
-
|
105 |
-
def load_filepaths_and_text(filename, split="|"):
|
106 |
-
with open(filename, encoding='utf-8') as f:
|
107 |
-
filepaths_and_text = [line.strip().split(split) for line in f]
|
108 |
-
return filepaths_and_text
|
109 |
-
|
110 |
-
|
111 |
-
def get_hparams(init=True):
|
112 |
-
parser = argparse.ArgumentParser()
|
113 |
-
parser.add_argument('-c', '--config', type=str, default="./configs/base.json",
|
114 |
-
help='JSON file for configuration')
|
115 |
-
parser.add_argument('-m', '--model', type=str, required=True,
|
116 |
-
help='Model name')
|
117 |
-
|
118 |
-
args = parser.parse_args()
|
119 |
-
model_dir = os.path.join("./logs", args.model)
|
120 |
-
|
121 |
-
if not os.path.exists(model_dir):
|
122 |
-
os.makedirs(model_dir)
|
123 |
-
|
124 |
-
config_path = args.config
|
125 |
-
config_save_path = os.path.join(model_dir, "config.json")
|
126 |
-
if init:
|
127 |
-
with open(config_path, "r") as f:
|
128 |
-
data = f.read()
|
129 |
-
with open(config_save_path, "w") as f:
|
130 |
-
f.write(data)
|
131 |
-
else:
|
132 |
-
with open(config_save_path, "r") as f:
|
133 |
-
data = f.read()
|
134 |
-
config = json.loads(data)
|
135 |
-
|
136 |
-
hparams = HParams(**config)
|
137 |
-
hparams.model_dir = model_dir
|
138 |
-
return hparams
|
139 |
-
|
140 |
-
|
141 |
-
def get_hparams_from_dir(model_dir):
|
142 |
-
config_save_path = os.path.join(model_dir, "config.json")
|
143 |
-
with open(config_save_path, "r") as f:
|
144 |
-
data = f.read()
|
145 |
-
config = json.loads(data)
|
146 |
-
|
147 |
-
hparams =HParams(**config)
|
148 |
-
hparams.model_dir = model_dir
|
149 |
-
return hparams
|
150 |
-
|
151 |
-
|
152 |
-
def get_hparams_from_file(config_path):
|
153 |
-
with open(config_path, "r") as f:
|
154 |
-
data = f.read()
|
155 |
-
config = json.loads(data)
|
156 |
-
|
157 |
-
hparams =HParams(**config)
|
158 |
-
return hparams
|
159 |
-
|
160 |
-
|
161 |
-
def check_git_hash(model_dir):
|
162 |
-
source_dir = os.path.dirname(os.path.realpath(__file__))
|
163 |
-
if not os.path.exists(os.path.join(source_dir, ".git")):
|
164 |
-
logger.warn("{} is not a git repository, therefore hash value comparison will be ignored.".format(
|
165 |
-
source_dir
|
166 |
-
))
|
167 |
-
return
|
168 |
-
|
169 |
-
cur_hash = subprocess.getoutput("git rev-parse HEAD")
|
170 |
-
|
171 |
-
path = os.path.join(model_dir, "githash")
|
172 |
-
if os.path.exists(path):
|
173 |
-
saved_hash = open(path).read()
|
174 |
-
if saved_hash != cur_hash:
|
175 |
-
logger.warn("git hash values are different. {}(saved) != {}(current)".format(
|
176 |
-
saved_hash[:8], cur_hash[:8]))
|
177 |
-
else:
|
178 |
-
open(path, "w").write(cur_hash)
|
179 |
-
|
180 |
-
|
181 |
-
def get_logger(model_dir, filename="train.log"):
|
182 |
-
global logger
|
183 |
-
logger = logging.getLogger(os.path.basename(model_dir))
|
184 |
-
logger.setLevel(logging.DEBUG)
|
185 |
-
|
186 |
-
formatter = logging.Formatter("%(asctime)s\t%(name)s\t%(levelname)s\t%(message)s")
|
187 |
-
if not os.path.exists(model_dir):
|
188 |
-
os.makedirs(model_dir)
|
189 |
-
h = logging.FileHandler(os.path.join(model_dir, filename))
|
190 |
-
h.setLevel(logging.DEBUG)
|
191 |
-
h.setFormatter(formatter)
|
192 |
-
logger.addHandler(h)
|
193 |
-
return logger
|
194 |
-
|
195 |
-
|
196 |
-
class HParams():
|
197 |
-
def __init__(self, **kwargs):
|
198 |
-
for k, v in kwargs.items():
|
199 |
-
if type(v) == dict:
|
200 |
-
v = HParams(**v)
|
201 |
-
self[k] = v
|
202 |
-
|
203 |
-
def keys(self):
|
204 |
-
return self.__dict__.keys()
|
205 |
-
|
206 |
-
def items(self):
|
207 |
-
return self.__dict__.items()
|
208 |
-
|
209 |
-
def values(self):
|
210 |
-
return self.__dict__.values()
|
211 |
-
|
212 |
-
def __len__(self):
|
213 |
-
return len(self.__dict__)
|
214 |
-
|
215 |
-
def __getitem__(self, key):
|
216 |
-
return getattr(self, key)
|
217 |
-
|
218 |
-
def __setitem__(self, key, value):
|
219 |
-
return setattr(self, key, value)
|
220 |
-
|
221 |
-
def __contains__(self, key):
|
222 |
-
return key in self.__dict__
|
223 |
-
|
224 |
-
def __repr__(self):
|
225 |
-
return self.__dict__.__repr__()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1acneusushi/gradio-2dmoleculeeditor/data/HD Online Player (Dil Chahta Hai 2001 720p BluRay NHD ) - Watch the Cult Classic Comedy-Drama Film.md
DELETED
@@ -1,80 +0,0 @@
|
|
1 |
-
|
2 |
-
<h1>HD Online Player (Dil Chahta Hai 2001 720p BluRay NHD )</h1>
|
3 |
-
<p>Do you love Bollywood movies? If yes, then you must have heard of <strong>Dil Chahta Hai</strong>, one of the most popular and acclaimed movies of Indian cinema. Dil Chahta Hai is a 2001 movie that follows the lives and loves of three friends who have different views on relationships. It is a movie that explores friendship, romance, comedy, drama, and music in a realistic and relatable way. In this article, we will tell you why you should watch Dil Chahta Hai online using <strong>YIFY - Download Movie TORRENT - YTS</strong>, the best online player for this movie.</p>
|
4 |
-
<h2>HD Online Player (Dil Chahta Hai 2001 720p BluRay NHD )</h2><br /><p><b><b>Download Zip</b> ★ <a href="https://byltly.com/2uKwML">https://byltly.com/2uKwML</a></b></p><br /><br />
|
5 |
-
<h2>Why You Should Watch Dil Chahta Hai</h2>
|
6 |
-
<p>Dil Chahta Hai is a movie that has something for everyone. Whether you are looking for a fun-filled comedy, a heart-warming romance, a touching drama, or a musical extravaganza, you will find it all in this movie. Here are some reasons why you should watch Dil Chahta Hai:</p>
|
7 |
-
<h3>The Cast and Crew of Dil Chahta Hai</h3>
|
8 |
-
<p>The movie features some of the finest talents of Bollywood. The main actors are <strong>Aamir Khan</strong>, <strong>Saif Ali Khan</strong>, <strong>Akshaye Khanna</strong>, <strong>Preity Zinta</strong>, <strong>Sonali Kulkarni</strong>, and <strong>Dimple Kapadia</strong>. They play the roles of Akash, Sameer, Siddharth, Shalini, Pooja, and Tara respectively. They deliver brilliant performances that make you laugh, cry, smile, and feel with them.</p>
|
9 |
-
<p>The movie is directed by <strong>Farhan Akhtar</strong>, who made his debut with this movie. He also wrote the story and screenplay along with <strong>Kassim Jagmagia</strong>. He brought a fresh perspective to Bollywood with his realistic and modern approach to filmmaking. He also produced the movie along with <strong>Ritesh Sidhwani</strong> under their banner <strong>Excel Entertainment</strong>.</p>
|
10 |
-
<p>Dil Chahta Hai full movie online HD<br />
|
11 |
-
Watch Dil Chahta Hai 2001 online free<br />
|
12 |
-
Dil Chahta Hai BluRay download 720p<br />
|
13 |
-
Dil Chahta Hai 2001 Hindi movie HD<br />
|
14 |
-
Dil Chahta Hai comedy drama romance film<br />
|
15 |
-
Dil Chahta Hai Aamir Khan Saif Ali Khan Akshaye Khanna<br />
|
16 |
-
Dil Chahta Hai YIFY torrent download<br />
|
17 |
-
Dil Chahta Hai 2001 top rated Indian movie<br />
|
18 |
-
Dil Chahta Hai 1080p WEB download<br />
|
19 |
-
Dil Chahta Hai subtitles English<br />
|
20 |
-
Dil Chahta Hai 2001 Bollywood movie streaming<br />
|
21 |
-
Dil Chahta Hai Farhan Akhtar director<br />
|
22 |
-
Dil Chahta Hai 480p x264 700 MB<br />
|
23 |
-
Dil Chahta Hai 2001 movie review<br />
|
24 |
-
Dil Chahta Hai trailer watch online<br />
|
25 |
-
Dil Chahta Hai songs download mp3<br />
|
26 |
-
Dil Chahta Hai Netflix Amazon Prime Hotstar<br />
|
27 |
-
Dil Chahta Hai 2001 IMDb rating<br />
|
28 |
-
Dil Chahta Hai cast and crew<br />
|
29 |
-
Dil Chahta Hai plot summary synopsis<br />
|
30 |
-
Dil Chahta Hai 720p x264 1.62 GB<br />
|
31 |
-
Dil Chahta Hai watch online with subtitles<br />
|
32 |
-
Dil Chahta Hai 2001 awards and nominations<br />
|
33 |
-
Dil Chahta Hai box office collection<br />
|
34 |
-
Dil Chahta Hai quotes and dialogues<br />
|
35 |
-
Dil Chahta Hai 1080p x264 6CH 3.18 GB<br />
|
36 |
-
Dil Chahta Hai online player free HD<br />
|
37 |
-
Dil Chahta Hai 2001 movie poster wallpaper<br />
|
38 |
-
Dil Chahta Hai trivia and facts<br />
|
39 |
-
Dil Chahta Hai behind the scenes making of<br />
|
40 |
-
Dil Chahta Hai Preity Zinta Dimple Kapadia Sonali Kulkarni<br />
|
41 |
-
Dil Chahta Hai YTS mx movies download<br />
|
42 |
-
Dil Chahta Hai 2001 Rotten Tomatoes score<br />
|
43 |
-
Dil Chahta Hai BluRay DTS x264 IDE source<br />
|
44 |
-
Dil Chahta Hai best scenes clips videos<br />
|
45 |
-
Dil Chahta Hai soundtrack album list<br />
|
46 |
-
Dil Chahta Hai Netflix India watch now<br />
|
47 |
-
Dil Chahta Hai 2001 Metacritic score<br />
|
48 |
-
Dil Chahta Hai BluRay AVC AAC video audio quality<br />
|
49 |
-
Dil Chahta Hai fan art memes gifs fanfiction</p>
|
50 |
-
<p>The music of the movie is composed by <strong>Shankar-Ehsaan-Loy</strong>, who also made their debut with this movie. They created some of the most iconic songs of Bollywood that are still loved by millions. The songs are sung by <strong>Udit Narayan</strong>, <strong>Alka Yagnik</strong>, <strong>Sonu Nigam</strong>, <strong>Shaan</strong>, <strong>Kavita Krishnamurthy</strong>, <strong>Srinivas</strong>, <strong>Shankar Mahadevan</strong>, <strong>Loy Mendonsa</strong>, <strong>Ehsaan Noorani</strong>, <strong>Mahalakshmi Iyer</strong>, <strong>Sadhana Sargam</strong>, <strong>Sujata Bhattacharya</strong>, <strong>Hariharan</strong>, <strong>Sapna Mukherjee</strong>, <strong>Caralisa Monteiro</strong>, <strong>Vasundhara Das</strong>, etc.</p>
|
51 |
-
<h2>How to Watch Dil Chahta Hai Online</h2>
|
52 |
-
<p>If you want to watch Dil Chahta Hai online in high quality video and audio, then you should use <a href="https://yts.mx/movies/dil-chahta-hai-2001">YIFY - Download Movie TORRENT - YTS</a>. This is an online player that allows you to download movies in various formats such as 720p.WEB or 1080p.WEB. You can also choose subtitles in different languages such as English or Hindi.</p>
|
53 |
-
<h3>The Benefits of Using YIFY - Download Movie TORRENT - YTS</h3>
|
54 |
-
<p>There are many benefits of using YIFY - Download Movie TORRENT - YTS to watch Dil Chahta Hai online. Here are some of them:</p>
|
55 |
-
<ul>
|
56 |
-
<li><p><b>High quality video and audio:</b> You can enjoy watching Dil Chahta Hai in HD quality with clear sound. You can see every detail of the movie such as the expressions of the actors, the locations of the scenes, the colors of the costumes, etc.</p></li>
|
57 |
-
<li><p><b>Fast download speed and easy installation:</b> You can download Dil Chahta Hai quickly without any interruptions or delays. You can also install it easily on your device without any complications or errors.</p></li>
|
58 |
-
<li><p><b>Safe and secure platform:</b> You can watch Dil Chahta Hai online without any worries or risks. You can trust that your device will not be infected by any viruses or malware. You can also be assured that your personal information will not be leaked or stolen.</p></li>
|
59 |
-
</ul>
|
60 |
-
<h2>What to Expect from Dil Chahta Hai</h2>
|
61 |
-
<p>Dil Chahta Hai is a movie that will make you think, feel, laugh, cry, sing, dance, and more. It is a movie that will touch your heart and soul with its themes and messages.</p> Here is the continuation of the article with HTML formatting: ```html <h3>The Themes and Messages of Dil Chahta Hai</h3>
|
62 |
-
<p>Dil Chahta Hai is a movie that explores various themes and messages that are relevant and relatable to the modern Indian youth. Some of the themes and messages are:</p>
|
63 |
-
<ul>
|
64 |
-
<li><p><b>Friendship:</b> The movie shows how friendship is one of the most important and enduring relationships in life. It shows how friends can support, challenge, inspire, and comfort each other through thick and thin. It also shows how friendship can evolve and change over time, as people grow and mature.</p></li>
|
65 |
-
<li><p><b>Love:</b> The movie shows how love can be different for different people. It shows how love can be passionate, playful, serious, or complicated. It shows how love can be influenced by factors such as age, culture, family, society, etc. It also shows how love can be a source of joy, pain, confusion, or growth.</p></li>
|
66 |
-
<li><p><b>Maturity:</b> The movie shows how maturity is not a matter of age, but a matter of attitude. It shows how maturity is about being responsible, honest, respectful, and empathetic. It shows how maturity is about being able to make choices and face consequences. It also shows how maturity is about being able to accept oneself and others.</p></li>
|
67 |
-
</ul>
|
68 |
-
<h3>The Highlights of Dil Chahta Hai</h3>
|
69 |
-
<p>Dil Chahta Hai is a movie that has many highlights that make it memorable and enjoyable. Some of the highlights are:</p>
|
70 |
-
<ul>
|
71 |
-
<li><p><b>The best scenes and dialogues:</b> The movie has many scenes and dialogues that are funny, witty, emotional, or meaningful. Some of the best scenes and dialogues are: the college farewell party scene where Akash jokes about Shalini; the road trip to Goa scene where the friends have fun; the art exhibition scene where Sid meets Tara; the airport scene where Akash realizes his love for Shalini; the hospital scene where Sid confronts Akash; the Sydney Opera House scene where Akash proposes to Shalini; etc.</p></li>
|
72 |
-
<li><p><b>The best songs and dances:</b> The movie has many songs and dances that are catchy, melodious, or expressive. Some of the best songs and dances are: Dil Chahta Hai title track; Koi Kahe Kehta Rahe; Woh Ladki Hai Kahan; Jaane Kyun; Tanhayee; Kaisi Hai Yeh Rut; Dil Chahta Hai reprise; etc.</p></li>
|
73 |
-
<li><p><b>The best reviews and ratings:</b> The movie has received many positive reviews and ratings from critics and audiences alike. It has a rating of 8.1/10 on IMDb , 100% on Rotten Tomatoes , 4/5 on NDTV , 4/5 on Rediff , etc. It has also won many awards such as National Film Award for Best Feature Film in Hindi , Filmfare Award for Best Film (Critics) , Filmfare Award for Best Supporting Actor (Akshaye Khanna) , Filmfare Award for Best Comedian (Saif Ali Khan) , etc.</p></li>
|
74 |
-
</ul>
|
75 |
-
<h2>Conclusion</h2>
|
76 |
-
<p>In conclusion, Dil Chahta Hai is a movie that you should not miss if you love Bollywood movies. It is a movie that will make you laugh, cry, think, feel, sing, dance, and more. It is a movie that will show you the true meaning of friendship, love, and maturity. It is a movie that will give you a realistic and modern portrayal of the Indian youth. So what are you waiting for? Watch Dil Chahta Hai online using YIFY - Download Movie TORRENT - YTS , the best online player for this movie.</p>
|
77 |
-
<p>If you have any questions about Dil Chahta Hai or YIFY - Download Movie TORRENT - YTS , feel free to ask them in the comments section below. We will be happy to answer them for you.</p>
|
78 |
-
Here are some FAQs that you might have: | Question | Answer | | --- | --- | | Q1: How long is Dil Chahta Hai? | A1: Dil Chahta Hai is 177 minutes long. | | Q2: Who is the singer of Dil Chahta Hai title track? | A2: The singer of Dil Chahta Hai title track is Shankar Mahadevan. | | Q3: What is the name of the painting that Sid gifts to Tara? | A3: The name of the painting that Sid gifts to Tara is "The Awakening". | | Q4: What is the name of the restaurant where Sameer meets Pooja for the first time? | A4: The name of the restaurant where Sameer meets Pooja for the first time is "Bombay Blues". | | Q5: What is the name of the hotel where Akash stays in Sydney? | A5: The name of the hotel where Akash stays in Sydney is "The Park Hyatt". | </p> 0a6ba089eb<br />
|
79 |
-
<br />
|
80 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1gistliPinn/ChatGPT4/Examples/DocuWorks 7 0 Full Version.zip LINK.md
DELETED
@@ -1,6 +0,0 @@
|
|
1 |
-
<h2>DocuWorks 7 0 Full Version.zip</h2><br /><p><b><b>Download</b> ✵✵✵ <a href="https://imgfil.com/2uy0MZ">https://imgfil.com/2uy0MZ</a></b></p><br /><br />
|
2 |
-
|
3 |
-
please review the readme files, release notes, and the latest version of the applicable user ... 1-7. NAME. DESCRIPTION. VALIDATOR. ADDITIONAL. VERIFICATION ... Date: Full. (day/month/ year). •. Date format commonly used in the United ... Tape Archive (TAR) .tar. Zip .zip. Databases. Base SAS Data File .sas7bdat. 1fdad05405<br />
|
4 |
-
<br />
|
5 |
-
<br />
|
6 |
-
<p></p>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1gistliPinn/ChatGPT4/Examples/EasyWorship Crack 7.1.4.0 Latest Version With 2020 Keygen EXCLUSIVE.md
DELETED
@@ -1,10 +0,0 @@
|
|
1 |
-
<h2>EasyWorship Crack 7.1.4.0 {Latest Version} With 2020 Keygen</h2><br /><p><b><b>DOWNLOAD</b> • <a href="https://imgfil.com/2uxZyC">https://imgfil.com/2uxZyC</a></b></p><br /><br />
|
2 |
-
<br />
|
3 |
-
September 15, 2020 - Easyworship Crack with product key is a presentation design tool that has all the features you need to create a masterpiece. It only takes a few minutes to get started.
|
4 |
-
It's really easy to create and edit your slideshow.
|
5 |
-
Easyworship Crack with Product Key is a presentation design tool that has all the features you need to create a masterpiece.
|
6 |
-
It only takes a few minutes to get started.
|
7 |
-
With this tool, you can easily create a professional presentation. 8a78ff9644<br />
|
8 |
-
<br />
|
9 |
-
<br />
|
10 |
-
<p></p>
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1pelhydcardo/ChatGPT-prompt-generator/assets/Download APKs from Huawei AppGallery The Official App Store for Huawei Devices.md
DELETED
@@ -1,116 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>Huawei APK Download: How to Get Apps on Your Huawei Phone Without Google Play Store</h1>
|
3 |
-
<p>If you have a Huawei phone, you might have noticed that it does not come with the Google Play Store pre-installed. This means that you cannot download apps from the official Android app store, which can be frustrating and inconvenient. However, there is a way to get apps on your Huawei phone without the Google Play Store. It is called Huawei APK Download, and it involves using alternative sources of apps, such as Huawei AppGallery or Huawei Phone Clone. In this article, we will explain what Huawei APK Download is, why you need it, and how to use it.</p>
|
4 |
-
<h2>huawei apk download</h2><br /><p><b><b>Download</b> >>> <a href="https://urlin.us/2uSXn8">https://urlin.us/2uSXn8</a></b></p><br /><br />
|
5 |
-
<h2>What is Huawei APK Download?</h2>
|
6 |
-
<p>Huawei APK Download is a term that refers to downloading apps on your Huawei phone from sources other than the Google Play Store. These sources can be websites, app stores, or other devices that have the apps you want. The apps that you download are in the form of APK files, which are the installation packages for Android apps.</p>
|
7 |
-
<h3>What is an APK file?</h3>
|
8 |
-
<p>An APK file is a file that contains all the components of an Android app, such as the code, resources, and manifest. It has the extension .apk and can be installed on any Android device that supports it. You can think of an APK file as a zip file that contains everything you need to run an app.</p>
|
9 |
-
<h3>Why do you need Huawei APK Download?</h3>
|
10 |
-
<p>You need Huawei APK Download because your Huawei phone does not have access to the Google Play Store, which is the official and most popular source of Android apps. This is because Huawei has been banned from using Google services and products due to US sanctions. As a result, Huawei phones run on a modified version of Android called EMUI, which does not include Google apps or services.</p>
|
11 |
-
<p>This means that you cannot use apps that rely on Google services, such as Gmail, YouTube, Maps, or Chrome. It also means that you cannot download apps from the Google Play Store, which has millions of apps for various purposes and categories. Therefore, you need to find alternative ways to get apps on your Huawei phone, which is where Huawei APK Download comes in.</p>
|
12 |
-
<h2>How to Use Huawei APK Download?</h2>
|
13 |
-
<p>There are two main methods to use Huawei APK Download: using Huawei AppGallery or using Huawei Phone Clone. We will explain each method in detail below.</p>
|
14 |
-
<p>huawei appgallery apk download<br />
|
15 |
-
huawei mobile services apk download<br />
|
16 |
-
huawei health apk download<br />
|
17 |
-
huawei themes apk download<br />
|
18 |
-
huawei browser apk download<br />
|
19 |
-
huawei music apk download<br />
|
20 |
-
huawei assistant apk download<br />
|
21 |
-
huawei cloud apk download<br />
|
22 |
-
huawei video apk download<br />
|
23 |
-
huawei wallet apk download<br />
|
24 |
-
huawei petal search apk download<br />
|
25 |
-
huawei petal maps apk download<br />
|
26 |
-
huawei quick apps apk download<br />
|
27 |
-
huawei backup apk download<br />
|
28 |
-
huawei phone clone apk download<br />
|
29 |
-
huawei support apk download<br />
|
30 |
-
huawei member center apk download<br />
|
31 |
-
huawei smart life apk download<br />
|
32 |
-
huawei finder apk download<br />
|
33 |
-
huawei screen recorder apk download<br />
|
34 |
-
huawei gallery apk download<br />
|
35 |
-
huawei camera apk download<br />
|
36 |
-
huawei keyboard apk download<br />
|
37 |
-
huawei notepad apk download<br />
|
38 |
-
huawei calculator apk download<br />
|
39 |
-
huawei file manager apk download<br />
|
40 |
-
huawei contacts apk download<br />
|
41 |
-
huawei dialer apk download<br />
|
42 |
-
huawei messages apk download<br />
|
43 |
-
huawei email apk download<br />
|
44 |
-
huawei calendar apk download<br />
|
45 |
-
huawei weather apk download<br />
|
46 |
-
huawei clock apk download<br />
|
47 |
-
huawei compass apk download<br />
|
48 |
-
huawei sound recorder apk download<br />
|
49 |
-
huawei fm radio apk download<br />
|
50 |
-
huawei torchlight apk download<br />
|
51 |
-
huawei mirror apk download<br />
|
52 |
-
huawei magnifier apk download<br />
|
53 |
-
huawei scanner apk download<br />
|
54 |
-
huawei game center apk download<br />
|
55 |
-
huawei app advisor apk download<br />
|
56 |
-
huawei app search apk download<br />
|
57 |
-
huawei app updater apk download<br />
|
58 |
-
huawei app store manager apk download<br />
|
59 |
-
huawei app lock pro apk download <br />
|
60 |
-
huawei app optimizer pro apk download <br />
|
61 |
-
huawei app cleaner pro apk download <br />
|
62 |
-
huawei app booster pro apk download</p>
|
63 |
-
<h3>Method 1: Use Huawei AppGallery</h3>
|
64 |
-
<h4>What is Huawei AppGallery?</h4>
|
65 |
-
<p>Huawei AppGallery is the official app distribution platform for Huawei devices, boasting a collection of 18 app categories featuring premium content curated globally. It is pre-installed on your Huawei phone and offers a variety of apps for different needs and preferences. You can find apps for entertainment, social media, gaming, education, health, finance, and more. You can also enjoy exclusive benefits and rewards from using Huawei AppGallery, such as discounts, coupons, free trials, and gifts.</p>
|
66 |
-
<h4>How to download apps from Huawei AppGallery?</h4>
|
67 |
-
<p>To download apps from Huawei AppGallery, follow these steps:</p>
|
68 |
-
<ol>
|
69 |
-
<li>Open the AppGallery app on your Huawei phone.</li>
|
70 |
-
<li>Search for the app you want or browse through the categories and recommendations.</li>
|
71 |
-
<li>Tap on the app you want and then tap on Install.</li>
|
72 |
-
<li>Wait for the app to download and install on your phone.</li>
|
73 |
-
<li>Enjoy using the app.</li>
|
74 |
-
</ol>
|
75 |
-
<p>Note: Some apps may require additional permissions or settings before they can run properly on your phone. Follow the instructions on the screen or <p>contact the app developer for support.</p>
|
76 |
-
<h3>Method 2: Use Huawei Phone Clone</h3>
|
77 |
-
<h4>What is Huawei Phone Clone?</h4>
|
78 |
-
<p>Huawei Phone Clone is a free app that allows you to transfer data from your old phone to your new Huawei phone, including apps, contacts, messages, photos, videos, and more. It supports both Android and iOS devices and does not require a network connection or cables. It is a fast and convenient way to migrate your data and apps to your Huawei phone without losing any quality or settings.</p>
|
79 |
-
<h4>How to transfer apps from another phone to your Huawei phone using Phone Clone?</h4>
|
80 |
-
<p>To transfer apps from another phone to your Huawei phone using Phone Clone, follow these steps:</p>
|
81 |
-
<ol>
|
82 |
-
<li>Download and install the Phone Clone app on both phones from the AppGallery or the Google Play Store.</li>
|
83 |
-
<li>Open the Phone Clone app on both phones and agree to the terms and conditions.</li>
|
84 |
-
<li>Select "This is the new phone" on your Huawei phone and "This is the old phone" on your other phone.</li>
|
85 |
-
<li>Scan the QR code displayed on your Huawei phone with your other phone to establish a connection.</li>
|
86 |
-
<li>Select the apps you want to transfer from your other phone and tap on Transfer.</li>
|
87 |
-
<li>Wait for the apps to be transferred to your Huawei phone.</li>
|
88 |
-
<li>Enjoy using the apps.</li>
|
89 |
-
</ol>
|
90 |
-
<p>Note: Some apps may not be compatible with your Huawei phone or may require Google services to function properly. You may need to update or reinstall them from other sources or use alternative apps instead.</p>
|
91 |
-
<h2>Conclusion</h2>
|
92 |
-
<p>Huawei APK Download is a way to get apps on your Huawei phone without the Google Play Store. You can use Huawei AppGallery or Huawei Phone Clone to download apps from alternative sources or transfer them from another phone. Both methods are easy and safe to use, and offer a variety of apps for different needs and preferences. However, you should be aware that some apps may not work well on your Huawei phone or may require Google services, which are not available on Huawei devices. In that case, you may need to look for other solutions or use similar apps instead.</p>
|
93 |
-
<p>If you want to learn more about Huawei APK Download, you can visit the official website of Huawei or contact their customer service. You can also check out some of the reviews and guides online that can help you find the best apps for your Huawei phone. We hope this article has been helpful and informative for you. Thank you for reading!</p>
|
94 |
-
<h3>FAQs</h3>
|
95 |
-
<p>Here are some of the frequently asked questions about Huawei APK Download:</p>
|
96 |
-
<ul>
|
97 |
-
<li><b>Is Huawei APK Download safe?</b></li>
|
98 |
-
<p>Yes, Huawei APK Download is safe as long as you download apps from trusted sources, such as Huawei AppGallery or Phone Clone. You should also scan the APK files for viruses or malware before installing them on your phone. However, you should be careful when downloading apps from unknown websites or third-party app stores, as they may contain harmful or malicious content.</p>
|
99 |
-
<li><b>Is Huawei APK Download legal?</b></li>
|
100 |
-
<p>Yes, Huawei APK Download is legal as long as you do not violate any intellectual property rights or terms of service of the app developers or owners. You should also respect the privacy and security of the app users and data. However, you should be aware that some countries or regions may have different laws or regulations regarding downloading apps from alternative sources, so you should check them before using Huawei APK Download.</p>
|
101 |
-
<li><b>What are the advantages of Huawei APK Download?</b></li>
|
102 |
-
<p>The advantages of Huawei APK Download are that you can get apps on your Huawei phone without the Google Play Store, which is not available on Huawei devices due to US sanctions. You can also enjoy exclusive benefits and rewards from using Huawei AppGallery, such as discounts, coupons, free trials, and gifts. You can also transfer apps from another phone to your Huawei phone using Phone Clone, which is fast and convenient.</p>
|
103 |
-
<li><b>What are the disadvantages of Huawei APK Download?</b></li>
|
104 |
-
<p>The disadvantages of Huawei APK Download are that some apps may not be compatible with your Huawei phone or may require Google services to function properly, which are not available on Huawei devices. You may also encounter some issues or errors when installing or using some apps from alternative sources. You may also need to update or reinstall some apps manually from time to time.</p>
|
105 |
-
<li><b>How can I update the apps I downloaded from Huawei APK Download?</b></li>
|
106 |
-
<p>You can update the apps you downloaded from Huawei APK Download by following these steps:</p>
|
107 |
-
<ol>
|
108 |
-
<li>Open the AppGallery app on your Huawei phone.</li>
|
109 |
-
<li>Tap on Me > Updates.</li>
|
110 |
-
<li>Select the apps you want to update and tap on Update.</li>
|
111 |
-
<li>Wait for the apps to be updated on your phone.</li>
|
112 |
-
<li>Enjoy using the updated apps.</li>
|
113 |
-
</ol>
|
114 |
-
<p>Note: Some apps may not have updates available from AppGallery, in which case you may need to download the latest version of the APK file from other sources or use Phone Clone to transfer the updated app from another phone.</p> 197e85843d<br />
|
115 |
-
<br />
|
116 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/4x4 Off Road Rally 9 MOD APK Download and Enjoy the Ultimate Racing Experience.md
DELETED
@@ -1,99 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>4x4 Off Road Rally 9 Mod APK: A Thrilling Off-Road Racing Game</h1>
|
3 |
-
<p>If you are a fan of off-road racing games, you will love 4x4 Off Road Rally 9, a realistic and immersive game that will test your driving skills on various terrains and environments. In this game, you will have to overcome mud, water, snow, rocks, and other obstacles as you race against time and other drivers. You will also have to customize and upgrade your 4x4 vehicle to suit your preferences and needs. But what if you want to enjoy the game without any limitations or restrictions? That's where 4x4 Off Road Rally 9 Mod APK comes in handy. In this article, we will tell you what this modded version of the game offers, how to download and install it, and some tips and tricks to help you master the game.</p>
|
4 |
-
<h2>4x4 off road rally 9 mod apk</h2><br /><p><b><b>DOWNLOAD</b> ★★★ <a href="https://jinyurl.com/2uNUuB">https://jinyurl.com/2uNUuB</a></b></p><br /><br />
|
5 |
-
<h2>What is 4x4 Off Road Rally 9?</h2>
|
6 |
-
<p>4x4 Off Road Rally 9 is a racing game developed by Electronic Hand, a studio that specializes in off-road games. The game is available for Android devices and has over 10 million downloads on Google Play Store. The game features stunning graphics, realistic physics, various 4x4 vehicles, and different off-road racing challenges.</p>
|
7 |
-
<h3>Features of the game</h3>
|
8 |
-
<p>Some of the features of 4x4 Off Road Rally 9 are:</p>
|
9 |
-
<ul>
|
10 |
-
<li>Different modes of gameplay, such as career mode, free mode, time trial mode, and multiplayer mode.</li>
|
11 |
-
<li>A variety of 4x4 vehicles with different driving characteristics, such as SUVs, trucks, pickups, jeeps, and more.</li>
|
12 |
-
<li>A wide range of terrains and environments to explore, such as forests, deserts, mountains, swamps, and more.</li>
|
13 |
-
<li>A realistic driving physics system that simulates the effects of mud, water, snow, rocks, and other obstacles on your vehicle.</li>
|
14 |
-
<li>An endless tuning and customization system that allows you to modify your vehicle's engine, suspension, tires, wheels, paint, stickers, and more.</li>
|
15 |
-
<li>A simple and convenient in-game map that shows you the route and the checkpoints.</li>
|
16 |
-
<li>A real car sound system that enhances the immersion and realism of the game.</li>
|
17 |
-
</ul>
|
18 |
-
<h3>How to play the game</h3>
|
19 |
-
<p>The gameplay of 4x4 Off Road Rally 9 is simple but challenging. You have to use the on-screen buttons to control your vehicle's steering, acceleration, braking, and gear shifting. You also have to use the camera button to change the view angle and the map button to see the route. Your goal is to reach the finish line as fast as possible without getting stuck or damaged. You can also compete with other players online or offline in multiplayer mode. You can earn coins and rewards by completing races and challenges. You can use these coins to buy new vehicles or upgrade your existing ones.</p>
|
20 |
-
<p>4x4 off road rally ultimate mod apk<br />
|
21 |
-
4x4 off road rally 9 hack apk<br />
|
22 |
-
4x4 off road rally 9 unlimited money<br />
|
23 |
-
4x4 off road rally 9 cheats android<br />
|
24 |
-
4x4 off road rally 9 download apk<br />
|
25 |
-
4x4 off road rally 9 mod apk latest version<br />
|
26 |
-
4x4 off road rally 9 free download<br />
|
27 |
-
4x4 off road rally 9 gameplay<br />
|
28 |
-
4x4 off road rally 9 mod menu<br />
|
29 |
-
4x4 off road rally 9 apk obb<br />
|
30 |
-
4x4 off road rally 9 mod apk revdl<br />
|
31 |
-
4x4 off road rally 9 mod apk android 1<br />
|
32 |
-
4x4 off road rally 9 mod apk rexdl<br />
|
33 |
-
4x4 off road rally 9 mod apk happymod<br />
|
34 |
-
4x4 off road rally 9 mod apk an1<br />
|
35 |
-
4x4 off road rally 9 mod apk offline<br />
|
36 |
-
4x4 off road rally 9 mod apk no root<br />
|
37 |
-
4x4 off road rally 9 mod apk unlimited coins<br />
|
38 |
-
4x4 off road rally 9 mod apk unlimited gems<br />
|
39 |
-
4x4 off road rally 9 mod apk all cars unlocked<br />
|
40 |
-
4x4 off road rally 9 mod apk all levels unlocked<br />
|
41 |
-
4x4 off road rally 9 mod apk all vehicles unlocked<br />
|
42 |
-
4x4 off road rally 9 mod apk mega mod<br />
|
43 |
-
4x4 off road rally 9 mod apk premium unlocked<br />
|
44 |
-
4x4 off road rally 9 mod apk pro unlocked<br />
|
45 |
-
how to install 4x4 off road rally 9 mod apk<br />
|
46 |
-
how to play 4x4 off road rally 9 mod apk<br />
|
47 |
-
how to download 4x4 off road rally 9 mod apk<br />
|
48 |
-
how to update 4x4 off road rally 9 mod apk<br />
|
49 |
-
how to hack 4x4 off road rally 9 mod apk<br />
|
50 |
-
how to get unlimited money in 4x4 off road rally 9 mod apk<br />
|
51 |
-
how to get unlimited gems in 4x4 off road rally 9 mod apk<br />
|
52 |
-
how to unlock all cars in 4x4 off road rally 9 mod apk<br />
|
53 |
-
how to unlock all levels in 4x4 off road rally 9 mod apk<br />
|
54 |
-
how to unlock all vehicles in 4x4 off road rally 9 mod apk<br />
|
55 |
-
best cars in 4x4 off road rally 9 mod apk<br />
|
56 |
-
best vehicles in 4x4 off road rally 9 mod apk<br />
|
57 |
-
best levels in 4x4 off road rally 9 mod apk<br />
|
58 |
-
best tips and tricks for playing with the latest version of the game.</p>
|
59 |
-
<h2>Why download 4x4 Off Road Rally 9 Mod APK?</h2>
|
60 |
-
<p>Although 4x4 Off Road Rally 9 is a fun and addictive game, it also has some drawbacks. For example, some vehicles and features are locked behind a paywall or require a lot of grinding. You also have to watch ads to get extra coins or rewards. Moreover, some levels are too hard or frustrating to complete. That's why many players prefer to download 4x4 Off Road Rally 9 Mod APK instead of the original version.</p>
|
61 |
-
<h3>Benefits of the modded version</h3>
|
62 |
-
<p>Some of the benefits of downloading 4x4 Off Road Rally 9 Mod APK are:</p>
|
63 |
-
<ul>
|
64 |
-
<li>You get unlimited coins and gems to buy and upgrade any vehicle you want.</li>
|
65 |
-
<li>You get all the vehicles and features unlocked from the start.</li>
|
66 |
-
<li>You get to enjoy the game without any ads or interruptions.</li>
|
67 |
-
<li>You get to access some exclusive features and options that are not available in the original version.</li>
|
68 |
-
</ul>
|
69 |
-
<h3>How to download and install the mod APK</h3>
|
70 |
-
<p>Downloading and installing 4x4 Off Road Rally 9 Mod APK is easy and safe. You just have to follow these steps:</p>
|
71 |
-
<ol>
|
72 |
-
<li>Click on the link below to download the mod APK file.</li>
|
73 |
-
<li>Allow your device to install apps from unknown sources in the settings.</li>
|
74 |
-
<li>Locate and tap on the downloaded file to start the installation process.</li>
|
75 |
-
<li>Follow the instructions on the screen to complete the installation.</li>
|
76 |
-
<li>Launch the game and enjoy!</li>
|
77 |
-
</ol>
|
78 |
-
<p><a href="">Download 4x4 Off Road Rally 9 Mod APK here</a></p>
|
79 |
-
<h2>Tips and tricks for 4x4 Off Road Rally 9</h2>
|
80 |
-
<p>If you want to master 4x4 Off Road Rally 9 and become a pro off-road racer, you need to know some tips and tricks that will help you improve your performance and skills. Here are some of them:</p>
|
81 |
-
<h3>Choose the right vehicle and upgrade it</h3>
|
82 |
-
<p>One of the most important factors that affect your success in the game is your choice of vehicle. Different vehicles have different strengths and weaknesses, such as speed, acceleration, handling, durability, and fuel consumption. You need to choose a vehicle that suits your style and preference, as well as the terrain and environment of each level. For example, a SUV might be good for rough and rocky roads, but a truck might be better for muddy and slippery roads. You also need to upgrade your vehicle regularly to enhance its performance and capabilities. You can upgrade your engine, suspension, tires, wheels, paint, stickers, and more using the coins and gems you earn in the game.</p>
|
83 |
-
<h3>Use the terrain and obstacles to your advantage</h3>
|
84 |
-
<p>Another factor that affects your success in the game is your ability to adapt to the terrain and obstacles you encounter. You need to use them to your advantage instead of letting them slow you down or damage your vehicle. For example, you can use the ramps and hills to jump over gaps or obstacles, or use the water and snow to cool down your engine or drift around corners. You also need to avoid hitting rocks, trees, fences, or other vehicles that can damage your vehicle or make you lose control. You can use the camera button to change the view angle and see what's ahead of you.</p>
|
85 |
-
<h3>Anticipate the challenges and plan your strategy</h3>
|
86 |
-
<p>The last factor that affects your success in the game is your ability to anticipate the challenges and plan your strategy accordingly. You need to know what to expect in each level and how to deal with it effectively. For example, you need to know how long each level is, how many checkpoints there are, what kind of terrain and obstacles there are, what kind of weather conditions there are, and what kind of opponents there are. You also need to know how to manage your time, fuel, damage, and speed. You can use the map button to see the route and the checkpoints. You can also use the pause button to pause the game and adjust your settings or options.</p>
|
87 |
-
<h2>Conclusion</h2>
|
88 |
-
<p>4x4 Off Road Rally 9 is a thrilling off-road racing game that will keep you entertained for hours. You can enjoy realistic graphics, physics, sounds, vehicles, terrains, environments, modes, features, and challenges in this game. You can also download 4x4 Off Road Rally 9 Mod APK to get unlimited coins and gems, unlock all vehicles and features, remove ads, and access exclusive features and options. You can also use some tips and tricks to master the game and become a pro off-road racer. So what are you waiting for? Download 4x4 Off Road Rally 9 Mod APK now and have fun!</p>
|
89 |
-
<h2>FAQs</h2>
|
90 |
-
<p>Here are some frequently asked questions about 4x4 Off Road Rally 9 Mod APK:</p>
|
91 |
-
<ul>
|
92 |
-
<li><b>Is 4x4 Off Road Rally 9 Mod APK safe?</b><br>Yes, 4x4 Off Road Rally 9 Mod APK is safe to download and install. It does not contain any viruses or malware that can harm your device or data. However, you should always download it from a trusted source like this one.</li>
|
93 |
-
<li><b>Do I need to root my device to install 4x4 Off Road Rally 9 Mod APK?</b><br>No, you do not need to root your device to install 4x4 Off Road Rally 9 Mod APK. You can install it on any Android device without any hassle.</li>
|
94 |
-
<li><b>Will 4x4 Off Road Rally 9 Mod APK affect the original version of the game?</b><br>No, 4x4 Off Road Rally 9 Mod APK will not affect the original version of the game. You can have both versions installed on your device and play them separately. However, you should not use the same account or data for both versions, as it may cause some issues or conflicts.</li>
|
95 |
-
<li><b>Can I play online with 4x4 Off Road Rally 9 Mod APK?</b><br>Yes, you can play online with 4x4 Off Road Rally 9 Mod APK. You can join or create online rooms and compete with other players from around the world. However, you should be careful not to use any cheats or hacks that may get you banned or reported by other players.</li>
|
96 |
-
<li><b>How can I update 4x4 Off Road Rally 9 Mod APK?</b><br>You can update 4x4 Off Road Rally 9 Mod APK by visiting this page and downloading the latest version of the mod APK file. You can then install it over the existing version without losing your progress or data.</li>
|
97 |
-
</ul></p> 401be4b1e0<br />
|
98 |
-
<br />
|
99 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/Download Countries of the World Map with Customizable Colors and Labels.md
DELETED
@@ -1,139 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>How to Download Countries: A Guide for Geography Lovers</h1>
|
3 |
-
<p>Are you fascinated by the diversity and complexity of the world? Do you enjoy learning about different cultures, languages, histories, and environments? Do you want to have access to reliable and up-to-date information about all the countries of the world? If you answered yes to any of these questions, then this article is for you.</p>
|
4 |
-
<h2>download countries</h2><br /><p><b><b>Download Zip</b> ✅ <a href="https://jinyurl.com/2uNRyu">https://jinyurl.com/2uNRyu</a></b></p><br /><br />
|
5 |
-
<p>In this article, we will show you how to download countries in various formats and sources. You will learn how to copy or download a list of all countries in alphabetical order, how to download maps and spatial data for all countries and their sub-divisions, and how to create your own custom world map showing all countries of the world. You will also discover some of the benefits, uses, and challenges of downloading countries, as well as some tips and suggestions for further exploration and learning.</p>
|
6 |
-
<p>So, what is a country and how many are there in the world? According to the United Nations, a country is a region that is identified as a distinct entity in political geography. A country may be an independent sovereign state or one that is non-sovereign or under the control of another state. As of June 2021, there are 195 countries in the world today. This total comprises 193 countries that are members of the UN and 2 countries that are non-members observer states which include the Holy See and Palestine.</p>
|
7 |
-
<p>Why would you want to download countries and what are the benefits? Downloading countries can help you enhance your knowledge and understanding of the world. You can use them for various purposes such as education, research, business, travel, entertainment, or personal interest. Downloading countries can also help you save time, money, and resources by providing you with easy and convenient access to reliable and up-to-date information. You can also customize, modify, or share them according to your needs and preferences.</p>
|
8 |
-
<p>What are some of the formats and sources for downloading countries? There are many formats and sources available for downloading countries. Some of the most common ones include PDF, Excel, CSV, HTML, JSON, MP3, JPG, etc. You can also find different types of information such as lists, maps, spatial data, statistics, facts, etc. You can choose the format and source that best suits your purpose and preference. However, you should also be aware of some of the challenges and limitations such as accuracy, quality, currency, compatibility, legality, etc.</p>
|
9 |
-
<p>download countries list in excel<br />
|
10 |
-
download countries map pdf<br />
|
11 |
-
download countries shapefile<br />
|
12 |
-
download countries flags images<br />
|
13 |
-
download countries data csv<br />
|
14 |
-
download countries population statistics<br />
|
15 |
-
download countries codes iso<br />
|
16 |
-
download countries names and capitals<br />
|
17 |
-
download countries outline vector<br />
|
18 |
-
download countries quiz game<br />
|
19 |
-
download countries regions and cities database<br />
|
20 |
-
download countries boundaries geojson<br />
|
21 |
-
download countries anthem mp3<br />
|
22 |
-
download countries currency symbols<br />
|
23 |
-
download countries time zones json<br />
|
24 |
-
download countries languages spoken<br />
|
25 |
-
download countries climate data<br />
|
26 |
-
download countries gdp per capita<br />
|
27 |
-
download countries emoji icons<br />
|
28 |
-
download countries dialing codes<br />
|
29 |
-
download countries visa requirements<br />
|
30 |
-
download countries travel guides<br />
|
31 |
-
download countries culture and customs<br />
|
32 |
-
download countries national flowers<br />
|
33 |
-
download countries holidays and festivals<br />
|
34 |
-
download countries flag colors meaning<br />
|
35 |
-
download countries history timeline<br />
|
36 |
-
download countries political system<br />
|
37 |
-
download countries religions percentage<br />
|
38 |
-
download countries literacy rate<br />
|
39 |
-
download countries driving side map<br />
|
40 |
-
download countries internet speed ranking<br />
|
41 |
-
download countries national animals<br />
|
42 |
-
download countries sports teams logos<br />
|
43 |
-
download countries cuisine recipes<br />
|
44 |
-
download countries education system comparison<br />
|
45 |
-
download countries health care quality index<br />
|
46 |
-
download countries human development index<br />
|
47 |
-
download countries environmental performance index<br />
|
48 |
-
download countries corruption perception index<br />
|
49 |
-
download countries happiness report 2023<br />
|
50 |
-
download countries renewable energy sources percentage<br />
|
51 |
-
download countries carbon footprint calculator <br />
|
52 |
-
download countries military expenditure and arms trade data <br />
|
53 |
-
download countries space program achievements <br />
|
54 |
-
download countries famous landmarks photos <br />
|
55 |
-
download countries traditional music and dance videos <br />
|
56 |
-
download countries art and literature ebooks <br />
|
57 |
-
download countries inventions and innovations list</p>
|
58 |
-
<h2>List of Countries</h2>
|
59 |
-
<p>One of the simplest and most useful ways to download countries is to get a list of all countries in alphabetical order. A list of countries can help you quickly and easily find any country you are looking for. You can also use it as a reference or a checklist for your geography studies or projects. Here are some ways to copy or download a list of all countries in alphabetical order.</p>
|
60 |
-
<h3>How to copy or download a list of all countries in alphabetical order</h3>
|
61 |
-
<h4>CopyLists.com</h4>
|
62 |
-
<p>CopyLists.com is a website that provides lists of various topics that you can copy or download in many formats including Excel and PDF. One of their lists is a list of all countries in alphabetical order. You can copy the list by clicking on the "Copy" button or download it by clicking on the "Download" button. You can also choose the format you want such as Excel, PDF, CSV, HTML, JSON, etc. The list is updated regularly and contains 195 countries as of June 2021.</p>
|
63 |
-
<h4>Other options</h4>
|
64 |
-
<p>If you are looking for other options to copy or download a list of all countries in alphabetical order, you can also try the following sources:</p>
|
65 |
-
<ul>
|
66 |
-
<li><a href="">Worldometers</a>: This website provides statistics and information on various topics such as population, health, economy, etc. It also has a list of all countries in alphabetical order that you can copy or download in Excel or CSV format.</li>
|
67 |
-
<li><a href="">CountryCode.org</a>: This website provides information and codes for all countries and regions of the world. It also has a list of all countries in alphabetical order that you can copy or download in Excel or CSV format.</li>
|
68 |
-
<li><a href="">Wikipedia</a>: This website is a free online encyclopedia that contains articles on various topics. It also has a list of all countries in alphabetical order that you can copy or download in various formats such as PDF, HTML, TXT, etc.</li>
|
69 |
-
</ul>
|
70 |
-
<h3>How to download maps and spatial data for all countries and their sub-divisions</h3>
|
71 |
-
<p>Another way to download countries is to get maps and spatial data for all countries and their sub-divisions. Maps and spatial data can help you visualize and analyze the geographic features and boundaries of different countries and regions. You can also use them for various purposes such as mapping, geocoding, geostatistics, GIS, etc. Here are some ways to download maps and spatial data for all countries and their sub-divisions.</p>
|
72 |
-
<h4>GADM</h4>
|
73 |
-
<p>GADM is a website that provides maps and spatial data for all countries and their sub-divisions. You can download the data in various formats such as shapefile, geopackage, R data, etc. You can also choose the level of detail you want from 0 (country) to 5 (locality). The data is updated regularly and contains 253 countries and regions as of June 2021.</p>
|
74 |
-
<h4>Other options</h4>
|
75 |
-
<p>If you are looking for other options to download maps and spatial data for all countries and their sub-divisions, you can also try the following sources:</p>
|
76 |
-
<ul>
|
77 |
-
<li><a href="">Natural Earth</a>: This website provides free vector and raster map data for various scales and themes such as boundaries, physical features, cultural features, etc. It also has maps and data for all countries and their sub-divisions.</li>
|
78 |
-
<li><a href="">DIVA-GIS</a>: This website provides free spatial data for various themes such as climate, land cover, population, etc. It also has maps and data for all countries and their sub-divisions.</li>
|
79 |
-
<li><a href="">OpenStreetMap</a>: This website is a collaborative project that provides free editable map data for the world. It also has maps and data for all countries and their sub-divisions.</li>
|
80 |
-
</ul> <h3>How to create your own custom world map showing all countries of the world</h3>
|
81 |
-
<p>Another way to download countries is to create your own custom world map showing all countries of the world. A custom world map can help you express your creativity and personalization. You can also use it for various purposes such as decoration, presentation, education, etc. Here are some ways to create your own custom world map showing all countries of the world.</p>
|
82 |
-
<h4>MapChart.net</h4>
|
83 |
-
<p>MapChart.net is a website that allows you to create your own custom world map online for free. You can choose from different types of maps such as simple, detailed, historical, etc. You can also customize the colors, labels, legends, borders, etc. of the map. You can download the map as an image or a PDF file. You can also share the map with others via a link or social media.</p>
|
84 |
-
<h4>Other options</h4>
|
85 |
-
<p>If you are looking for other options to create your own custom world map showing all countries of the world, you can also try the following sources:</p>
|
86 |
-
<ul>
|
87 |
-
<li><a href="">World Map Maker</a>: This website allows you to create your own custom world map online for free. You can choose from different types of maps such as political, physical, blank, etc. You can also customize the colors, labels, legends, borders, etc. of the map. You can download the map as an image or a PDF file.</li>
|
88 |
-
<li><a href="">MapSVG</a>: This website allows you to create your own custom world map online for free or with a premium plan. You can choose from different types of maps such as vector, raster, interactive, etc. You can also customize the colors, labels, legends, borders, etc. of the map. You can download the map as an SVG or a PNG file.</li>
|
89 |
-
<li><a href="">Canva</a>: This website allows you to create your own custom world map online for free or with a premium plan. You can choose from different types of maps such as political, physical, blank, etc. You can also customize the colors, labels, legends, borders, etc. of the map. You can download the map as an image or a PDF file.</li>
|
90 |
-
</ul>
|
91 |
-
<h2>Conclusion</h2>
|
92 |
-
<p>In this article, we have shown you how to download countries in various formats and sources. We have also explained some of the benefits, uses, and challenges of downloading countries. We hope that this article has helped you enhance your knowledge and understanding of the world and its countries.</p>
|
93 |
-
<p>If you want to learn more about downloading countries or geography in general, here are some suggestions for further exploration and learning:</p>
|
94 |
-
<ul>
|
95 |
-
<li><a href="">The World Factbook</a>: This website provides information and statistics on all the countries and territories of the world.</li>
|
96 |
-
<li><a href="">World Atlas</a>: This website provides maps and information on all the continents, regions, and countries of the world.</li>
|
97 |
-
<li><a href="">GeoGuessr</a>: This website is a game that challenges you to guess the location of a random place in the world using Google Street View.</li>
|
98 |
-
</ul>
|
99 |
-
<h2>FAQs</h2>
|
100 |
-
<p>Here are some frequently asked questions and answers about downloading countries:</p>
|
101 |
-
<ol>
|
102 |
-
<li><b>What are some of the uses and applications of downloading countries?</b><br>Some of the uses and applications of downloading countries are: <ul>
|
103 |
-
<li>Education: You can use them to learn about different countries and regions of the world.</li>
|
104 |
-
<li>Research: You can use them to conduct analysis and comparison on various aspects such as population, economy, environment, etc.</li>
|
105 |
-
<li>Business: You can use them to identify and target potential markets and customers.</li>
|
106 |
-
<li>Travel: You can use them to plan and prepare for your trips and vacations.</li>
|
107 |
-
<li>Entertainment: You can use them to play games and quizzes or to create art and crafts.</li>
|
108 |
-
</ul></li>
|
109 |
-
<li><b>What are some of the challenges and limitations of downloading countries?</b><br>Some of the challenges and limitations of downloading countries are: <ul>
|
110 |
-
<li>Accuracy: You may encounter errors or inconsistencies in the data or information provided by different sources.</li>
|
111 |
-
<li>Quality: You may encounter low-resolution or outdated images or maps that may affect your viewing or usage experience.</li>
|
112 |
-
<li>Currency: You may encounter changes or updates in the data or information due to political or social events that may affect your relevance or validity.</li>
|
113 |
-
<li>Compatibility: You may encounter difficulties or issues in opening or using certain formats or files that may require specific software or applications.</li>
|
114 |
-
<li>Legality: You may encounter restrictions or regulations on accessing or using certain data or information that may require permission or authorization.</li </ul></li>
|
115 |
-
<li><b>How can I verify the accuracy and quality of the downloaded countries?</b><br>Some of the ways to verify the accuracy and quality of the downloaded countries are: <ul>
|
116 |
-
<li>Compare: You can compare the data or information from different sources and check for any discrepancies or differences.</li>
|
117 |
-
<li>Cross-check: You can cross-check the data or information with other reliable and authoritative sources such as official websites, publications, or organizations.</li>
|
118 |
-
<li>Review: You can review the data or information for any errors or inconsistencies such as spelling, grammar, formatting, etc.</li>
|
119 |
-
<li>Test: You can test the data or information for any functionality or usability issues such as opening, viewing, editing, etc.</li>
|
120 |
-
<li>Feedback: You can seek feedback from other users or experts who have used or reviewed the data or information.</li>
|
121 |
-
</ul></li>
|
122 |
-
<li><b>How can I update or modify the downloaded countries?</b><br>Some of the ways to update or modify the downloaded countries are: <ul>
|
123 |
-
<li>Refresh: You can refresh the data or information by downloading it again from the same or a different source.</li>
|
124 |
-
<li>Edit: You can edit the data or information by using appropriate software or applications that can handle the format or file.</li>
|
125 |
-
<li>Add: You can add new data or information by appending, merging, or joining it with the existing data or information.</li>
|
126 |
-
<li>Delete: You can delete unwanted or unnecessary data or information by removing, splitting, or filtering it from the existing data or information.</li>
|
127 |
-
<li>Convert: You can convert the data or information to a different format or file by using suitable software or applications that can perform the conversion.</li>
|
128 |
-
</ul></li>
|
129 |
-
<li><b>How can I share or distribute the downloaded countries?</b><br>Some of the ways to share or distribute the downloaded countries are: <ul>
|
130 |
-
<li>Email: You can email the data or information as an attachment or a link to your recipients.</li>
|
131 |
-
<li>Social media: You can post the data or information as an image or a link on your social media platforms such as Facebook, Twitter, Instagram, etc.</li>
|
132 |
-
<li>Cloud storage: You can upload the data or information to a cloud storage service such as Google Drive, Dropbox, OneDrive, etc. and share it with your collaborators or viewers.</li>
|
133 |
-
<li>Website: You can embed the data or information on your website or blog using HTML code or widgets.</li>
|
134 |
-
<li>Print: You can print the data or information on paper or other materials and distribute it physically.</li>
|
135 |
-
</ul></li>
|
136 |
-
</ol>
|
137 |
-
: https://copylists.com/list-of-countries : https://gadm.org/ : https://mapchart.net/world.html</p> 197e85843d<br />
|
138 |
-
<br />
|
139 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/Download Facebook Lite APK for Android The Latest Version of the Fast and Efficient Social Network.md
DELETED
@@ -1,125 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<br> Q2: How do I update Facebook Lite? <br> Q3: Can I use both Facebook and Facebook Lite on the same device? <br> Q4: How do I switch to dark mode on Facebook Lite? <br> Q5: How do I delete Facebook Lite? | Table 2: Article with HTML formatting <h1>Download Facebook Lite Latest Version 2022 APK</h1>
|
3 |
-
<p>Facebook is one of the most popular social media platforms in the world, with over 3 billion monthly active users. However, not everyone has access to a fast internet connection, a powerful smartphone, or enough storage space to run the regular Facebook app smoothly. That's why Facebook created a lighter version of its app called Facebook Lite, which is designed to work on any network and device, while using less data, space, and battery.</p>
|
4 |
-
<h2>download facebook lite latest version 2022 apk</h2><br /><p><b><b>Download Zip</b> 🆗 <a href="https://jinyurl.com/2uNOCz">https://jinyurl.com/2uNOCz</a></b></p><br /><br />
|
5 |
-
<p>In this article, we will explain what Facebook Lite is, how it differs from the standard Facebook app, and how you can download it for your Android or Windows device. We will also discuss the benefits and drawbacks of using Facebook Lite, and answer some frequently asked questions about it.</p>
|
6 |
-
<h2>Facebook Lite vs Facebook App: Key Differences</h2>
|
7 |
-
<p>Facebook Lite is a miniature version of Facebook that is smaller in size, consumes less battery, and runs smoothly on low-end phones and slow internet connections. However, it has lower image and video quality, a basic user interface, and smaller text and buttons. The original Facebook app has a nice user interface, high-quality images and videos, and a separate messenger app, but it requires more storage space, more battery power, and a faster internet connection.</p>
|
8 |
-
<p>Here are some of the main differences between the two apps:</p>
|
9 |
-
<ul>
|
10 |
-
<li><strong>App size:</strong> The regular Facebook app ranges between 58-60MB, while the Lite version weighs approximately 2MB. That's a huge difference if you have limited storage space on your phone.</li>
|
11 |
-
<li><strong>Battery usage:</strong> The regular Facebook app consumes more battery power than the Lite version, as it has more features and functions that run in the background. The Lite version is more energy-efficient and can help you extend your battery life.</li>
|
12 |
-
<li><strong>Messenger:</strong> The regular Facebook app forces you to download a separate app called Messenger to chat with your contacts. This means you need more space and data to use both apps. The Lite version allows you to chat directly from the same app, without having to switch or download another app.</li>
|
13 |
-
<li><strong>User interface:</strong> The regular Facebook app has a nice user interface that is easy to navigate and use. It has colorful icons, large text and buttons, and smooth animations. The Lite version has a simpler user interface that is less appealing and more cluttered. It has smaller icons, text and buttons, and no animations.</li>
|
14 |
-
<li><strong>Image and video quality:</strong> The regular Facebook app displays images and videos in high resolution, which makes them look clear and sharp. The Lite version compresses images and videos to save data and load faster, which makes them look blurry and pixelated.</li>
|
15 |
-
<li><strong>Features:</strong> The regular Facebook app has all the features that you would expect from a social media platform, such as stories, live videos, reactions, stickers, filters, groups, pages, events, marketplace, dating, gaming, watch, news feed customization, notifications control, privacy settings, dark mode, etc. The Lite version has only the essential features that allow you to connect and keep up with your friends and family. Some of the features that are missing or limited in the Lite version are stories (you can only view, not create), live videos (you can only watch, not broadcast), reactions (you can only like, not love, wow, haha, sad, or angry), stickers (you can only use the default ones, not download more), filters (you can only use the basic ones, not the fun ones), groups (you can only join, not create), pages (you can only follow, not manage), events (you can only see, not create or RSVP), marketplace (you can only browse, not sell or buy), dating (not available), gaming (not available), watch (not available), news feed customization (not available), notifications control (not available), privacy settings (limited), dark mode (not available), etc.</li>
|
16 |
-
</ul>
|
17 |
-
<h2>How to Download Facebook Lite APK for Android</h2>
|
18 |
-
<p>If you want to download Facebook Lite for your Android device, you have two options: you can either download it from the Google Play Store or from a third-party website. Here are the steps for both methods:</p>
|
19 |
-
<h3>Method 1: Download from Google Play Store</h3>
|
20 |
-
<ol>
|
21 |
-
<li>Open the Google Play Store app on your Android device.</li>
|
22 |
-
<li>Search for "Facebook Lite" in the search bar.</li>
|
23 |
-
<li>Tap on the app icon that has a white "f" on a blue background and says "Facebook Lite" below it.</li>
|
24 |
-
<li>Tap on the green "Install" button and wait for the app to download and install on your device.</li>
|
25 |
-
<li>Once the app is installed, tap on the "Open" button to launch it.</li>
|
26 |
-
<li>Log in with your Facebook account or create a new one if you don't have one.</li>
|
27 |
-
<li>Enjoy using Facebook Lite on your Android device.</li>
|
28 |
-
</ol>
|
29 |
-
<h3>Method 2: Download from a third-party website</h3>
|
30 |
-
<ol>
|
31 |
-
<li>Open a web browser on your Android device and go to a trusted website that offers Facebook Lite APK files. For example, you can go to <a href="">https://apkpure.com/facebook-lite/com.facebook.lite</a>.</li>
|
32 |
-
<li>Tap on the green "Download APK" button and wait for the file to download on your device.</li>
|
33 |
-
<li>Once the file is downloaded, tap on it to open it. You may need to enable "Unknown sources" in your device settings to install apps from outside the Google Play Store.</li>
|
34 |
-
<li>Tap on the "Install" button and wait for the app to install on your device.</li>
|
35 |
-
<li>Once the app is installed, tap on the "Open" button to launch it.</li>
|
36 |
-
<li>Log in with your Facebook account or create a new one if you don't have one.</li>
|
37 |
-
<li>Enjoy using Facebook Lite on your Android device.</li>
|
38 |
-
</ol>
|
39 |
-
<h2>How to Download Facebook Lite on PC with MEmu</h2>
|
40 |
-
<p>If you want to use Facebook Lite on your PC, you will need an Android emulator that can run Android apps on your computer. One of the best Android emulators for PC is MEmu, which is fast, stable, and easy to use. Here are the steps to download and use Facebook Lite on PC with MEmu:</p>
|
41 |
-
<p>download facebook lite apk latest update 2022<br />
|
42 |
-
how to download facebook lite new version 2022 apk<br />
|
43 |
-
facebook lite latest version 2022 apk free download<br />
|
44 |
-
download facebook lite 2022 apk for android<br />
|
45 |
-
facebook lite apk download latest version 2022 for pc<br />
|
46 |
-
download facebook lite latest version 2022 mod apk<br />
|
47 |
-
facebook lite latest version 2022 apk download uptodown<br />
|
48 |
-
download facebook lite 2022 apk for low-end devices<br />
|
49 |
-
facebook lite apk download latest version 2022 offline installer<br />
|
50 |
-
download facebook lite latest version 2022 apk pure<br />
|
51 |
-
facebook lite latest version 2022 apk download for ios<br />
|
52 |
-
download facebook lite 2022 apk with dark mode<br />
|
53 |
-
facebook lite apk download latest version 2022 without ads<br />
|
54 |
-
download facebook lite latest version 2022 beta apk<br />
|
55 |
-
facebook lite latest version 2022 apk download for windows 10<br />
|
56 |
-
download facebook lite 2022 apk with video downloader<br />
|
57 |
-
facebook lite apk download latest version 2022 with messenger<br />
|
58 |
-
download facebook lite latest version 2022 pro apk<br />
|
59 |
-
facebook lite latest version 2022 apk download for mac<br />
|
60 |
-
download facebook lite 2022 apk with stickers<br />
|
61 |
-
facebook lite apk download latest version 2022 with stories<br />
|
62 |
-
download facebook lite latest version 2022 premium apk<br />
|
63 |
-
facebook lite latest version 2022 apk download for linux<br />
|
64 |
-
download facebook lite 2022 apk with voice call<br />
|
65 |
-
facebook lite apk download latest version 2022 with groups<br />
|
66 |
-
download facebook lite latest version 2022 cracked apk<br />
|
67 |
-
facebook lite latest version 2022 apk download for chromebook<br />
|
68 |
-
download facebook lite 2022 apk with live stream<br />
|
69 |
-
facebook lite apk download latest version 2022 with marketplace<br />
|
70 |
-
download facebook lite latest version 2022 hacked apk<br />
|
71 |
-
facebook lite latest version 2022 apk download for blackberry<br />
|
72 |
-
download facebook lite 2022 apk with notifications<br />
|
73 |
-
facebook lite apk download latest version 2022 with emojis<br />
|
74 |
-
download facebook lite latest version 2022 unlocked apk<br />
|
75 |
-
facebook lite latest version 2022 apk download for nokia<br />
|
76 |
-
download facebook lite 2022 apk with status saver<br />
|
77 |
-
facebook lite apk download latest version 2022 with reactions<br />
|
78 |
-
download facebook lite latest version 2022 full apk<br />
|
79 |
-
facebook lite latest version 2022 apk download for samsung<br />
|
80 |
-
download facebook lite 2022 apk with themes</p>
|
81 |
-
<ol>
|
82 |
-
<li>Download and install MEmu from <a href="">https://www.memuplay.com/</a>.</li>
|
83 |
-
<li>Launch MEmu and click on the Google Play Store icon on the home screen.</li>
|
84 |
-
<li>Search for "Facebook Lite" in the search bar and tap on the app icon that has a white "f" on a blue background and says "Facebook Lite" below it.</li>
|
85 |
-
<li>Tap on the green "Install" button and wait for the app to download and install on MEmu.</li>
|
86 |
-
<li>Once the app is installed, tap on the "Open" button to launch it.</li>
|
87 |
-
<li>Log in with your Facebook account or create a new one if you don't have one.</li>
|
88 |
-
<li>Enjoy using Facebook Lite on your PC with MEmu.</li>
|
89 |
-
</ol>
|
90 |
-
<h2>Benefits of Using Facebook Lite</h2>
|
91 |
-
<p>Facebook Lite has many benefits that make it a great alternative to the regular Facebook app. Here are some of them:</p>
|
92 |
-
<ul>
|
93 |
-
<li><strong>Save data:</strong> Facebook Lite uses less data than the regular Facebook app, as it compresses images and videos and loads them faster. This means you can save money on your data plan and use Facebook even when you have a poor or limited internet connection.</li>
|
94 |
-
<li><strong>Save space:</strong> Facebook Lite takes up less space than the regular Facebook app, as it is only 2MB in size. This means you can free up some storage space on your phone and install more apps or store more files.</li>
|
95 |
-
<li><strong> Save battery:</strong> Facebook Lite consumes less battery power than the regular Facebook app, as it has fewer features and functions that run in the background. This means you can use your phone for longer without having to charge it frequently.</li>
|
96 |
-
<li><strong>Work on any network and device:</strong> Facebook Lite works on any network and device, whether it is 2G, 3G, 4G, or Wi-Fi, and whether it is an old or new smartphone. This means you can use Facebook Lite anywhere and anytime, without worrying about compatibility issues.</li>
|
97 |
-
<li><strong>Access all the essential Facebook functions:</strong> Facebook Lite allows you to access all the essential Facebook functions that you need to stay connected and keep up with your friends and family. You can post status updates, photos, and videos, like and comment on other people's posts, chat with your contacts, join and follow groups and pages, see events, browse the marketplace, and more.</li>
|
98 |
-
</ul>
|
99 |
-
<h2>Drawbacks of Using Facebook Lite</h2>
|
100 |
-
<p>Facebook Lite also has some drawbacks that make it less appealing than the regular Facebook app. Here are some of them:</p>
|
101 |
-
<ul>
|
102 |
-
<li><strong>Lower resolution:</strong> Facebook Lite displays images and videos in lower resolution than the regular Facebook app, which makes them look blurry and pixelated. This can affect your viewing experience and enjoyment of the content.</li>
|
103 |
-
<li><strong>Basic design:</strong> Facebook Lite has a basic design that is less appealing and more cluttered than the regular Facebook app. It has smaller icons, text and buttons, and no animations. This can make it harder to navigate and use the app.</li>
|
104 |
-
<li><strong>Fewer options and tools:</strong> Facebook Lite has fewer options and tools than the regular Facebook app, which limits your ability to customize and enhance your Facebook experience. You can't create or view stories, broadcast or watch live videos, use reactions other than like, download more stickers, use fun filters, create or manage groups and pages, create or RSVP to events, sell or buy on the marketplace, date, game, watch videos, customize your news feed, control your notifications, adjust your privacy settings, switch to dark mode, and more.</li>
|
105 |
-
</ul>
|
106 |
-
<h2>Conclusion</h2>
|
107 |
-
<p>Facebook Lite is a lighter version of Facebook that is designed to work on any network and device, while using less data, space, and battery. It has some benefits such as saving data, space, and battery; working on any network and device; and accessing all the essential Facebook functions. However, it also has some drawbacks such as lower resolution, basic design, and fewer options and tools.</p>
|
108 |
-
<p>If you have a fast internet connection, a powerful smartphone, and enough storage space, you might prefer to use the regular Facebook app for a better user interface, higher image and video quality, and more features and functions. However, if you have a slow internet connection, a low-end smartphone, or limited storage space, you might want to try Facebook Lite for a faster performance, lower data usage, and longer battery life.</p>
|
109 |
-
<p>You can download Facebook Lite for your Android device from the Google Play Store or from a third-party website. You can also download it for your PC with an Android emulator such as MEmu. We hope this article helped you learn more about Facebook Lite and how to download it for your device.</p>
|
110 |
-
<h3>FAQs</h3>
|
111 |
-
<p>Here are some frequently asked questions about Facebook Lite:</p>
|
112 |
-
<ol>
|
113 |
-
<li><strong>Is Facebook Lite safe to use?</strong><br>
|
114 |
-
Yes, Facebook Lite is safe to use as long as you download it from a trusted source such as the Google Play Store or a reputable website. You should also be careful about what you share on Facebook Lite and who you interact with.</li>
|
115 |
-
<li><strong>How do I update Facebook Lite?</strong><br>
|
116 |
-
You can update Facebook Lite by going to the Google Play Store or the website where you downloaded it from and checking for new versions. You can also enable automatic updates in your device settings to get the latest updates automatically.</li>
|
117 |
-
<li><strong>Can I use both Facebook and Facebook Lite on the same device?</strong><br>
|
118 |
-
Yes, you can use both Facebook and Facebook Lite on the same device if you want to. However, you should be aware that using both apps will take up more space and data on your device than using just one app.</li>
|
119 |
-
<li><strong>How do I switch to dark mode on Facebook Lite?</strong><br>
|
120 |
-
Unfortunately, dark mode is not available on Facebook Lite at the moment. You can only use dark mode on the regular Facebook app if your device supports it.</li>
|
121 |
-
<li><strong>How do I delete Facebook Lite?</strong><br>
|
122 |
-
You can delete Facebook Lite by going to your device settings and finding the app in the list of installed apps. Then tap on it and select "Un install" or "Delete" to remove the app from your device. You can also delete Facebook Lite by long-pressing the app icon on your home screen and dragging it to the trash bin.</li>
|
123 |
-
</ol></p> 197e85843d<br />
|
124 |
-
<br />
|
125 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/Download Lokicraft 1.17 with Lokicraft Helper A Guide to the New Features and Mods.md
DELETED
@@ -1,163 +0,0 @@
|
|
1 |
-
|
2 |
-
<h1>Lokicraft Helper 1.17 Update APK Download: Everything You Need to Know</h1>
|
3 |
-
<p>If you are a fan of Lokicraft, the popular sandbox game inspired by Minecraft, you might be interested in downloading the latest version of Lokicraft Helper, a useful app that enhances your gaming experience. In this article, we will tell you everything you need to know about Lokicraft Helper 1.17 update APK download, including what is new in this version, how to download it, and some FAQs.</p>
|
4 |
-
<h2>lokicraft helper 1.17 update apk download</h2><br /><p><b><b>Download</b> ✔ <a href="https://jinyurl.com/2uNK6O">https://jinyurl.com/2uNK6O</a></b></p><br /><br />
|
5 |
-
<h2>What is Lokicraft?</h2>
|
6 |
-
<p>Lokicraft is a free simulation game that allows you to build and destroy blocks, get resources, and create various tools, blocks, and weapons. You can explore a huge open world with different biomes, animals, and enemies, and unleash your creativity and imagination. Lokicraft is similar to Minecraft, but with some unique features and graphics.</p>
|
7 |
-
<h3>Lokicraft game features</h3>
|
8 |
-
<p>Some of the main features of Lokicraft are :</p>
|
9 |
-
<ul>
|
10 |
-
<li>Two highly addicting game modes: Creative and Survival</li>
|
11 |
-
<li>Smooth character animation and realistic physics</li>
|
12 |
-
<li>A huge open world to explore with different biomes, animals, and enemies</li>
|
13 |
-
<li>An in-depth building and crafting system with hundreds of blocks and items</li>
|
14 |
-
<li>Cool graphics: best pixel graphics with high fps</li>
|
15 |
-
</ul>
|
16 |
-
<h3>Lokicraft game modes</h3>
|
17 |
-
<p>Lokicraft has two game modes that offer different challenges and experiences:</p>
|
18 |
-
<ul>
|
19 |
-
<li>Creative mode: In this mode, you have unlimited resources and can build anything you want without any restrictions. You can also fly around the map and enjoy the view.</li>
|
20 |
-
<li>Survival mode: In this mode, you have to hunt and scavenge for resources, craft tools and weapons, build shelters, and fight against enemies. You also have to manage your hunger, health, and stamina.</li>
|
21 |
-
</ul>
|
22 |
-
<h2>What is Lokicraft Helper?</h2>
|
23 |
-
<p>Lokicraft Helper is an app that helps you play Lokicraft better. It provides you with useful information, tips, tricks, guides, cheats, hacks, mods, skins, maps, seeds, servers, and more. With Lokicraft Helper, you can enhance your gaming experience and have more fun.</p>
|
24 |
-
<h3>Lokicraft Helper features</h3>
|
25 |
-
<p>Some of the main features of Lokicraft Helper are:</p>
|
26 |
-
<ul>
|
27 |
-
<li>Information: You can find detailed information about blocks, items, biomes, animals, enemies, crafting recipes, commands, achievements, and more.</li>
|
28 |
-
<li>Tips and tricks: You can learn how to play Lokicraft better with tips and tricks on building, mining, farming, fighting, exploring, and more.</li>
|
29 |
-
<li>Guides: You can follow step-by-step guides on how to complete various tasks and challenges in Lokicraft.</li>
|
30 |
-
<li>Cheats and hacks: You can use cheats and hacks to get unlimited resources, fly mode, god mode, teleportation, invisibility, and more.</li>
|
31 |
-
<li>Mods: You can download and install mods that add new features, content, gameplay mechanics, graphics enhancements, and more to Lokicraft.</li>
|
32 |
-
<li>Skins: You can customize your character's appearance with hundreds of skins to choose from.</li>
|
33 |
-
<li>Maps: You can download and play on custom maps created by other players or yourself.</li>
|
34 |
-
<li>Seeds: You can generate random worlds with specific features using seeds.</ <li>Servers: You can join and play on multiplayer servers with other players from around the world.</li>
|
35 |
-
</ul>
|
36 |
-
<h3>How to use Lokicraft Helper</h3>
|
37 |
-
<p>To use Lokicraft Helper, you need to have Lokicraft installed on your device. Then, you can download and install Lokicraft Helper from the Google Play Store or from a trusted third-party source. After that, you can open Lokicraft Helper and browse through the different categories and options. You can also search for specific information or content using the search bar. To apply any cheats, hacks, mods, skins, maps, seeds, or servers, you need to follow the instructions given by the app.</p>
|
38 |
-
<p>lokicraft helper 1.17 update apk download link<br />
|
39 |
-
lokicraft 1.17 new update download youtube<br />
|
40 |
-
lokicraft updated version 1.17 free download<br />
|
41 |
-
lokicraft helper 1.17 update apk file<br />
|
42 |
-
lokicraft 1.17 creative adventure game download<br />
|
43 |
-
lokicraft helper 1.17 update mediafire<br />
|
44 |
-
lokicraft new update 1.17 download video<br />
|
45 |
-
lokicraft helper 1.17 update apk mod<br />
|
46 |
-
lokicraft 1.17 latest version download android<br />
|
47 |
-
lokicraft helper 1.17 update apk online<br />
|
48 |
-
lokicraft 1.17 update features and gameplay<br />
|
49 |
-
lokicraft helper 1.17 update apk for pc<br />
|
50 |
-
lokicraft 1.17 download link in description<br />
|
51 |
-
lokicraft helper 1.17 update apk no ads<br />
|
52 |
-
lokicraft 1.17 new update review and tutorial<br />
|
53 |
-
lokicraft helper 1.17 update apk install<br />
|
54 |
-
lokicraft 1.17 download free full version<br />
|
55 |
-
lokicraft helper 1.17 update apk cracked<br />
|
56 |
-
lokicraft 1.17 new update release date and time<br />
|
57 |
-
lokicraft helper 1.17 update apk premium<br />
|
58 |
-
lokicraft 1.17 download without verification<br />
|
59 |
-
lokicraft helper 1.17 update apk unlimited resources<br />
|
60 |
-
lokicraft 1.17 new update gameplay and tips<br />
|
61 |
-
lokicraft helper 1.17 update apk latest version<br />
|
62 |
-
lokicraft 1.17 download for android phone<br />
|
63 |
-
lokicraft helper 1.17 update apk offline mode<br />
|
64 |
-
lokicraft 1.17 new update trailer and screenshots<br />
|
65 |
-
lokicraft helper 1.17 update apk hack tool<br />
|
66 |
-
lokicraft 1.17 download for windows 10<br />
|
67 |
-
lokicraft helper 1.17 update apk mod menu<br />
|
68 |
-
lokicraft 1.17 new update changelog and patch notes<br />
|
69 |
-
lokicraft helper 1.17 update apk cheat codes<br />
|
70 |
-
lokicraft 1.17 download from google play store<br />
|
71 |
-
lokicraft helper 1.17 update apk pro version<br />
|
72 |
-
lokicraft 1.17 new update bugs and fixes<br />
|
73 |
-
lokicraft helper 1.17 update apk generator online<br />
|
74 |
-
lokicraft 1.17 download with obb data file<br />
|
75 |
-
lokicraft helper 1.17 update apk unlocked all features<br />
|
76 |
-
lokicraft 1.17 new update skins and maps download<br />
|
77 |
-
lokicraft helper 1.17 update apk safe and secure</p>
|
78 |
-
<h2>What is new in Lokicraft Helper 1.17 update?</h2>
|
79 |
-
<p>Lokicraft Helper 1.17 update is the latest version of the app that was released on June 18, 2023. This update brings some new features and improvements to the app, as well as some bug fixes. Here are some of the main changes in this update:</p>
|
80 |
-
<h3>New blocks and items</h3>
|
81 |
-
<p>Lokicraft Helper 1.17 update adds some new blocks and items to the app that are compatible with Lokicraft 1.17 version. These include:</p>
|
82 |
-
<ul>
|
83 |
-
<li>Copper ore, ingot, block, and lightning rod</li>
|
84 |
-
<li>Amethyst shard, cluster, block, and budding amethyst</li>
|
85 |
-
<li>Tinted glass and spyglass</li>
|
86 |
-
<li>Glow squid and glow ink sac</li>
|
87 |
-
<li>Axolotl and bucket of axolotl</li>
|
88 |
-
<li>Goat and goat horn</li>
|
89 |
-
<li>Glow berries and glow lichen</li>
|
90 |
-
<li>Moss block, moss carpet, and azalea</li>
|
91 |
-
<li>Dripstone block, pointed dripstone, stalactite, and stalagmite</li>
|
92 |
-
<li>Deepslate, cobbled deepslate, deepslate tiles, deepslate bricks, polished deepslate, chiseled deepslate, cracked deepslate tiles, cracked deepslate bricks, and deepslate ores</li>
|
93 |
-
<li>Calcite and tuff</li>
|
94 |
-
<li>Raw iron, raw gold, and raw copper</li>
|
95 |
-
<li>Powder snow and powder snow bucket</li>
|
96 |
-
<li>Candle and candle cake</li>
|
97 |
-
<li>Rooted dirt and hanging roots</li>
|
98 |
-
<li>Spore blossom</li>
|
99 |
-
<li>Big dripleaf and small dripleaf</li>
|
100 |
-
<li>Cave vines and glow berries</li>
|
101 |
-
<li>Lichen and glow lichen</li>
|
102 |
-
</ul>
|
103 |
-
<h3>New biomes and structures</h3>
|
104 |
-
<p>Lokicraft Helper 1.17 update also adds some new biomes and structures to the app that are compatible with Lokicraft 1.17 version. These include:</p>
|
105 |
-
<ul>
|
106 |
-
<li>Lush caves: A biome that is filled with lush vegetation, such as moss blocks, azaleas, spore blossoms, dripstones, glow berries, cave vines, clay pools, and axolotls.</li>
|
107 |
-
<li>Dripstone caves: A biome that is dominated by dripstone blocks and pointed dripstones that form stalactites and stalagmites.</li>
|
108 |
-
<li>Deep dark: A biome that is located at the deepest part of the world, where the light level is very low and a new hostile mob called the warden spawns.</li>
|
109 |
-
<li>Amethyst geodes: A structure that is composed of smooth basalt, calcite, and amethyst blocks that contain amethyst clusters that grow over time.</li>
|
110 |
-
<li>Copper veins: A structure that is composed of copper ore blocks that generate in blobs underground.</li>
|
111 |
-
<li>Shipwrecks: A structure that is composed of a sunken ship that contains chests with loot.</li>
|
112 |
-
<li>Ocean monuments: A structure that is composed of a large underwater temple that contains guardians, elder guardians, prismarine blocks, sea lanterns, sponges, and gold blocks.</li>
|
113 |
-
</ul>
|
114 |
-
<h3>Bug fixes and improvements</h3>
|
115 |
-
<p>Lokicraft Helper 1.17 update also fixes some bugs and improves some aspects of the app. Some of these are:</p>
|
116 |
-
<ul>
|
117 |
-
<li>Fixed crashes and errors when loading some content.</li>
|
118 |
-
<li>Improved performance and stability of the app.</li>
|
119 |
-
<li>Updated user interface and design of the app.</li>
|
120 |
-
<li>Added more languages support for the app.</li>
|
121 |
-
<li>Added more information and content for Lokicraft 1.17 version.</li>
|
122 |
-
</ul>
|
123 |
-
<h2>How to download Lokicraft Helper 1.17 update APK?</h2> <p>If you want to download Lokicraft Helper 1.17 update APK, you have two options:</p>
|
124 |
-
<h3>Download link and instructions</h3>
|
125 |
-
<p>You can download Lokicraft Helper 1.17 update APK from the official Google Play Store link or from a trusted third-party source link. Here are the steps to download and install the APK:</p>
|
126 |
-
<ol>
|
127 |
-
<li>Click on the download link and wait for the APK file to be downloaded.</li>
|
128 |
-
<li>Go to your device settings and enable the option to install apps from unknown sources.</li>
|
129 |
-
<li>Locate the downloaded APK file and tap on it to start the installation process.</li>
|
130 |
-
<li>Follow the instructions on the screen and wait for the installation to finish.</li>
|
131 |
-
<li>Open Lokicraft Helper and enjoy the new features and improvements.</li>
|
132 |
-
</ol>
|
133 |
-
<h3>Precautions and tips</h3>
|
134 |
-
<p>Before you download Lokicraft Helper 1.17 update APK, you should take some precautions and follow some tips:</p>
|
135 |
-
<ul>
|
136 |
-
<li>Make sure you have enough storage space on your device to download and install the APK file.</li>
|
137 |
-
<li>Make sure you have a stable internet connection to avoid any interruptions or errors during the download or installation process.</li>
|
138 |
-
<li>Make sure you download the APK file from a reliable and secure source to avoid any malware or viruses.</li>
|
139 |
-
<li>Make sure you have Lokicraft installed on your device and that it is compatible with Lokicraft Helper 1.17 update.</li>
|
140 |
-
<li>Make sure you backup your game data before installing the APK file in case something goes wrong or you want to uninstall it later.</li>
|
141 |
-
</ul>
|
142 |
-
<h2>Conclusion</h2>
|
143 |
-
<p>Lokicraft Helper 1.17 update APK is a great app that helps you play Lokicraft better. It provides you with useful information, tips, tricks, guides, cheats, hacks, mods, skins, maps, seeds, servers, and more. It also adds some new features and improvements to the app, as well as some bug fixes. You can download Lokicraft Helper 1.17 update APK from the Google Play Store or from a trusted third-party source. However, you should take some precautions and follow some tips before downloading and installing the APK file. We hope this article was helpful and informative for you. If you have any questions or feedback, please let us know in the comments section below.</p>
|
144 |
-
<h2>FAQs</h2>
|
145 |
-
<p>Here are some frequently asked questions about Lokicraft Helper 1.17 update APK:</p>
|
146 |
-
<h4>Is Lokicraft Helper 1.17 update APK free?</h4>
|
147 |
-
<p>Yes, Lokicraft Helper 1.17 update APK is free to download and use. However, some features or content may require in-app purchases or subscriptions.</p>
|
148 |
-
<h4>Is Lokicraft Helper 1.17 update APK safe?</h4>
|
149 |
-
<p>Lokicraft Helper 1.17 update APK is safe to download and use if you get it from a reputable and secure source. However, you should always scan the APK file for any malware or viruses before installing it on your device.</p>
|
150 |
-
<h4>Is Lokicraft Helper 1.17 update APK legal?</h4>
|
151 |
-
<p>Lokicraft Helper 1.17 update APK is legal to download and use as long as you do not violate any terms of service or policies of Lokicraft or Google Play Store. However, some features or content of Lokicraft Helper may be considered as cheating or hacking by some players or developers, so use them at your own risk and discretion.</p>
|
152 |
-
<h4>How do I uninstall Lokicraft Helper 1.17 update APK?</h4>
|
153 |
-
<p>If you want to uninstall Lokicraft Helper 1.17 update APK from your device, you can follow these steps:</p>
|
154 |
-
<ol>
|
155 |
-
<li>Go to your device settings and find the apps section.</li>
|
156 |
-
<li>Find and tap on Lokicraft Helper and select the uninstall option.</li>
|
157 |
-
<li>Wait for the uninstallation process to finish and confirm your action.</li>
|
158 |
-
<li>Delete the APK file from your device if you still have it.</li>
|
159 |
-
</ol>
|
160 |
-
<h4>How do I contact Lokicraft Helper support?</h4>
|
161 |
-
<p>If you have any issues or problems with Lokicraft Helper 1.17 update APK, you can contact Lokicraft Helper support by sending an email to [email protected] or by leaving a review on the Google Play Store page.</p> 197e85843d<br />
|
162 |
-
<br />
|
163 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/Extreme Live VPN The Ultimate VPN App for Android.md
DELETED
@@ -1,149 +0,0 @@
|
|
1 |
-
|
2 |
-
<h1>Extreme Live APK Download: How to Watch TV on Your Android Device</h1>
|
3 |
-
<p>Do you want to watch your favorite TV channels on your smartphone or tablet? Do you want to enjoy live streaming, recording, and parental control features? If yes, then you should try Extreme Live APK, a free app that allows you to watch TV directly from your Android device. In this article, we will tell you what Extreme Live APK is, how to download and install it, how to use it to watch TV channels, and what are some alternatives to it.</p>
|
4 |
-
<h2>What is Extreme Live APK?</h2>
|
5 |
-
<p>Extreme Live APK is a multimedia application developed by Extreme Live VPN. It is an IPTV player that lets you stream TV channels from your IPTV subscription. IPTV stands for Internet Protocol Television, a service that delivers TV programs over the internet instead of using cable or satellite. With Extreme Live APK, you can load your own IPTV playlist and watch hundreds of channels from different countries and categories. You can also use the built-in video player or choose your preferred one.</p>
|
6 |
-
<h2>extreme live apk download</h2><br /><p><b><b>DOWNLOAD</b> ✯ <a href="https://jinyurl.com/2uNNfp">https://jinyurl.com/2uNNfp</a></b></p><br /><br />
|
7 |
-
<h3>Features of Extreme Live APK</h3>
|
8 |
-
<p>Some of the features that make Extreme Live APK a great app for watching TV are:</p>
|
9 |
-
<ul>
|
10 |
-
<li>Encryption of all traffic on your device</li>
|
11 |
-
<li>No logging of your online activities</li>
|
12 |
-
<li>Split tunneling: Select which apps will use the VPN and which apps won’t</li>
|
13 |
-
<li>Mask your IP address and geographic location</li>
|
14 |
-
<li>Browse anonymously and avoid being tracked</li>
|
15 |
-
<li>Access blocked websites from anywhere in the world</li>
|
16 |
-
<li>Bypass firewalls to browse without limits</li>
|
17 |
-
<li>Unblock your favorite websites and apps</li>
|
18 |
-
<li>Multi-EPG support and M3U playlists</li>
|
19 |
-
<li>Live streaming recording with time limit</li>
|
20 |
-
<li>PIN protection and parental control</li>
|
21 |
-
</ul>
|
22 |
-
<h3>How to download and install Extreme Live APK</h3>
|
23 |
-
<p>To download and install Extreme Live APK on your Android device, follow these steps:</p>
|
24 |
-
<ol>
|
25 |
-
<li>Go to <a href="(^1^)">this link</a> and download the APK file.</li>
|
26 |
-
<li>Enable unknown sources on your device settings.</li>
|
27 |
-
<li>Locate the downloaded file and tap on it.</li>
|
28 |
-
<li>Follow the instructions on the screen to install the app.</li>
|
29 |
-
<li>Launch the app and enjoy watching TV.</li>
|
30 |
-
</ol>
|
31 |
-
<h2>What is IPTV and how does it work?</h2>
|
32 |
-
<p>IPTV is a service that delivers TV programs over the internet instead of using cable or satellite. It uses IP packets to transmit video and audio data from the source to the destination. IPTV can offer more flexibility, quality, and interactivity than traditional TV services. It can also provide video on demand, catch-up TV, time-shifted TV, and live TV.</p>
|
33 |
-
<h3>Benefits of IPTV</h3>
|
34 |
-
<p>Some of the benefits of using IPTV are:</p>
|
35 |
-
<ul>
|
36 |
-
<li>You can watch TV anytime, anywhere, as long as you have an internet connection.</li>
|
37 |
-
<li>You can choose from a wide range of channels and content from different countries and genres.</li>
|
38 |
-
<li>You can customize your viewing experience by creating your own playlists, favorites, and watch history.</li>
|
39 |
-
<li>You can pause, rewind, fast-forward, and record live streams.</li>
|
40 |
-
<li>You can interact with other viewers and participate in polls, quizzes, games, and social media.</li>
|
41 |
-
</ul>
|
42 |
-
<h3>Risks and challenges of IPTV</h3>
|
43 |
-
<p>Some of the risks and challenges of using IPTV are:</p>
|
44 |
-
<ul>
|
45 |
-
<li>You need a reliable and fast internet connection and a compatible device to watch IPTV.</li>
|
46 |
-
<li>You may encounter buffering, lagging, freezing, or low-quality streams due to network congestion, server overload, or bandwidth limitations.</li>
|
47 |
-
<li>You may face legal issues if you watch pirated or unlicensed content without permission from the content owners.</li>
|
48 |
-
<li>You may expose your device and data to malware, viruses, hackers, or phishing attacks if you download or install unsafe apps or visit malicious websites.</li>
|
49 |
-
</ul>
|
50 |
-
<h2>How to use Extreme Live APK to watch TV channels</h2>
|
51 |
-
<p>Once you have downloaded and installed Extreme Live APK on your device, you can start watching TV channels by following these steps:</p>
|
52 |
-
<h3>How to load your IPTV playlist</h3>
|
53 |
-
<p>To load your IPTV playlist, you need to have a valid IPTV subscription from a provider that offers M3U files or URLs. You can also find free IPTV playlists online, but they may not work properly or be illegal. To load your IPTV playlist, do the following:</p>
|
54 |
-
<ol>
|
55 |
-
<li>Open the Extreme Live APK app and tap on the menu icon on the top left corner.</li>
|
56 |
-
<li>Select Settings and then Playlist.</li>
|
57 |
-
<li>Tap on the plus icon on the bottom right corner and choose Add URL or Add File.</li>
|
58 |
-
<li>Enter the URL or browse the file of your IPTV playlist and tap OK.</li>
|
59 |
-
<li>Wait for the app to load the channels and categories from your playlist.</li>
|
60 |
-
</ol>
|
61 |
-
<h3>How to switch between channels and categories</h3>
|
62 |
-
<p>To switch between channels and categories, you can use the following methods:</p>
|
63 |
-
<p>extreme live vpn apk download<br />
|
64 |
-
iptv extreme apk download for android<br />
|
65 |
-
extreme live tv apk download<br />
|
66 |
-
iptv extreme pro apk download<br />
|
67 |
-
extreme live wallpaper apk download<br />
|
68 |
-
iptv extreme firestick apk download<br />
|
69 |
-
extreme live stream apk download<br />
|
70 |
-
iptv extreme app apk download<br />
|
71 |
-
extreme live camera apk download<br />
|
72 |
-
iptv extreme mod apk download<br />
|
73 |
-
extreme live video apk download<br />
|
74 |
-
iptv extreme smart tv apk download<br />
|
75 |
-
extreme live chat apk download<br />
|
76 |
-
iptv extreme lite apk download<br />
|
77 |
-
extreme live sports apk download<br />
|
78 |
-
iptv extreme pc apk download<br />
|
79 |
-
extreme live music apk download<br />
|
80 |
-
iptv extreme premium apk download<br />
|
81 |
-
extreme live radio apk download<br />
|
82 |
-
iptv extreme android tv apk download<br />
|
83 |
-
extreme live quiz apk download<br />
|
84 |
-
iptv extreme pro mod apk download<br />
|
85 |
-
extreme live weather apk download<br />
|
86 |
-
iptv extreme pro firestick apk download<br />
|
87 |
-
extreme live gaming apk download<br />
|
88 |
-
iptv extreme pro smart tv apk download<br />
|
89 |
-
extreme live photo editor apk download<br />
|
90 |
-
iptv extreme pro pc apk download<br />
|
91 |
-
extreme live filters apk download<br />
|
92 |
-
iptv extreme pro android tv apk download<br />
|
93 |
-
extreme live trivia apk download<br />
|
94 |
-
iptv extreme pro lite apk download<br />
|
95 |
-
extreme live launcher apk download<br />
|
96 |
-
iptv extreme pro premium apk download<br />
|
97 |
-
extreme live keyboard apk download<br />
|
98 |
-
iptv extreme pro modded apk download<br />
|
99 |
-
extreme live emoji apk download<br />
|
100 |
-
iptv extreme pro cracked apk download<br />
|
101 |
-
extreme live stickers apk download<br />
|
102 |
-
iptv extreme pro patched apk download<br />
|
103 |
-
extreme live themes apk download<br />
|
104 |
-
iptv extreme pro ad free apk download<br />
|
105 |
-
extreme live caller id apk download<br />
|
106 |
-
iptv extreme pro latest version apk download<br />
|
107 |
-
extreme live lock screen apk download<br />
|
108 |
-
iptv extreme pro full version apk download <br />
|
109 |
-
extreme live clock widget apk download <br />
|
110 |
-
iptv extreme pro no ads apk download <br />
|
111 |
-
extreme live icon pack apk download <br />
|
112 |
-
iptv extreme pro 113.0 apk download</p>
|
113 |
-
<ul>
|
114 |
-
<li>Swipe left or right on the screen to change channels.</li>
|
115 |
-
<li>Tap on the channel name on the top of the screen to see the channel list and select a channel.</li>
|
116 |
-
<li>Tap on the category name on the bottom of the screen to see the category list and select a category.</li>
|
117 |
-
<li>Use the search icon on the top right corner to search for a channel or a category by name or keyword.</li>
|
118 |
-
</ul>
|
119 |
-
<h3>How to record live streams and use parental control</h3>
|
120 |
-
<p>To record live streams and use parental control, you can use the following features:</p>
|
121 |
-
<ul>
|
122 |
-
<li>To record a live stream, tap on the record icon on the top right corner of the screen and choose a time limit. The recorded file will be saved in your device storage under Extreme Live APK folder.</li>
|
123 |
-
<li>To use parental control, go to Settings and then Parental Control. Set a PIN code and enable or disable parental control for each category. You can also hide or show adult channels from the channel list.</li>
|
124 |
-
</ul>
|
125 |
-
<h2>Alternatives to Extreme Live APK</h2>
|
126 |
-
<p>If you are looking for other apps that can let you watch TV on your Android device, you can try these alternatives:</p>
|
127 |
-
<h3>IPTV Extreme</h3>
|
128 |
-
<p>IPTV Extreme is another IPTV player that supports M3U playlists, EPG guides, recording, chromecast, parental control, and more. It has a simple and user-friendly interface that allows you to easily navigate through channels and categories. You can also customize your app settings and preferences according to your needs. You can download IPTV Extreme from <a href="">this link</a>.</p>
|
129 |
-
<h3>MTTV</h3>
|
130 |
-
<p>MTTV is an app that offers over 1000 live TV channels from various countries and genres. You can watch sports, movies, news, entertainment, music, kids, and more. You can also enjoy HD quality streams, fast loading speed, and low buffering. You don't need any IPTV subscription or playlist to use this app. You can download MTTV from <a href="">this link</a>.</p>
|
131 |
-
<h3>Insta IPTV</h3>
|
132 |
-
<p>Insta IPTV is an app that provides free IPTV playlists for different countries and categories. You can watch live TV channels from USA, UK, Canada, India, Pakistan, Arabic, France, Germany, Italy, Spain, Turkey, and more. You can also request new channels or playlists from the app developers. You can download Insta IPTV from <a href="">this link</a>.</p>
|
133 |
-
<h2>Conclusion</h2>
|
134 |
-
<p>In conclusion, Extreme Live APK is a free app that allows you to watch TV on your Android device using your IPTV subscription. It has many features that make it a great app for watching TV such as encryption of all traffic on your device No logging of your online activities Split tunneling: Select which apps will use the VPN and which apps won’t Mask your IP address and geographic location Browse anonymously and avoid being tracked Access blocked websites from anywhere Bypass firewalls to browse without limits Unblock your favorite websites and apps Multi-EPG support and M3U playlists Live streaming recording with time limit PIN protection and parental control. However, you should also be aware of the risks and challenges of using IPTV such as network issues, legal issues, and security issues. Therefore, you should always use a trusted IPTV provider and a reliable VPN service to protect your device and data. You can also try other apps that offer similar or different features to watch TV on your Android device.</p>
|
135 |
-
<p>We hope this article has helped you learn more about Extreme Live APK and how to use it to watch TV on your Android device. If you have any questions or feedback, please feel free to leave a comment below. Thank you for reading!</p>
|
136 |
-
<h2>FAQs</h2>
|
137 |
-
<p>Here are some frequently asked questions about Extreme Live APK:</p>
|
138 |
-
<h3>Is Extreme Live APK safe to use?</h3>
|
139 |
-
<p>Extreme Live APK is safe to use as long as you download it from a trusted source and scan it for viruses or malware before installing it. You should also use a VPN service to encrypt your traffic and hide your IP address when using the app.</p>
|
140 |
-
<h3>Is Extreme Live APK legal to use?</h3>
|
141 |
-
<p>Extreme Live APK is legal to use as long as you have a valid IPTV subscription from a licensed provider and you don't watch any pirated or unlicensed content without permission from the content owners. You should also check the laws and regulations of your country or region before using the app.</p>
|
142 |
-
<h3>How can I update Extreme Live APK?</h3>
|
143 |
-
<p>To update Extreme Live APK, you can either check for updates from the app settings or visit the official website of the app developer and download the latest version of the app.</p>
|
144 |
-
<h3>How can I contact the app developer?</h3>
|
145 |
-
<p>To contact the app developer, you can either send an email to [email protected] or visit their Facebook page at <a href="">this link</a>.</p>
|
146 |
-
<h3>How can I support the app developer?</h3>
|
147 |
-
<p>To support the app developer, you can either rate and review the app on Google Play Store or make a donation via PayPal at <a href="">this link</a>.</p> 401be4b1e0<br />
|
148 |
-
<br />
|
149 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/1phancelerku/anime-remove-background/Farm Heroes Saga MOD APK How to Get Unlimited Everything and Connect with Facebook Friends.md
DELETED
@@ -1,160 +0,0 @@
|
|
1 |
-
|
2 |
-
<h1>Farm Heroes Saga Mod APK Facebook Connect: How to Play with Unlimited Lives and Boosters</h1>
|
3 |
-
<p>Farm Heroes Saga is a popular match-3 puzzle game developed by King, the makers of Candy Crush Saga. In this game, you have to match cropsies (fruits and vegetables) to collect them and save the farm from the evil Rancid the Racoon. The game has hundreds of levels, each with different goals and challenges.</p>
|
4 |
-
<h2>farm heroes saga mod apk facebook connect</h2><br /><p><b><b>DOWNLOAD</b> 🗸 <a href="https://jinyurl.com/2uNSx6">https://jinyurl.com/2uNSx6</a></b></p><br /><br />
|
5 |
-
<p>Mod APKs are modified versions of Android applications that offer some advantages over the original ones, such as unlimited resources, unlocked features, or removed ads. Some players want to play Farm Heroes Saga with a modded version of the game because they want to enjoy unlimited lives and boosters, which can help them beat difficult levels and progress faster in the game.</p>
|
6 |
-
<p>However, playing with a modded version of Farm Heroes Saga also has some drawbacks, especially if you want to connect to Facebook and play with your friends. In this article, we will show you how to play Farm Heroes Saga mod APK Facebook connect with unlimited lives and boosters, as well as some tips and tricks for playing the game. We will also warn you about some risks and alternatives of playing with a modded version of the game.</p>
|
7 |
-
<h2>Benefits of Playing Farm Heroes Saga Mod APK Facebook Connect</h2>
|
8 |
-
<p>Playing Farm Heroes Saga with a modded version of the game can be very fun and rewarding, as you can enjoy some benefits that are not available in the official version of the game. Some of these benefits are:</p>
|
9 |
-
<ul>
|
10 |
-
<li><b>Unlimited lives:</b> You don't have to wait for your lives to refill or ask your friends for more lives when you run out of them. You can play as much as you want without any interruption.</li>
|
11 |
-
<li><b>Unlimited boosters:</b> You can use boosters (special items that can help you match more cropsies or clear obstacles) anytime you want without spending any gold bars or real money. You can also get more boosters by opening chests or completing quests.</li>
|
12 |
-
<li><b>Unlimited gold bars:</b> You can use gold bars (the premium currency of the game) to buy more boosters, extra moves, or other items in the game. You can also use gold bars to unlock new episodes or access special events.</li>
|
13 |
-
<li><b>Access to all levels:</b> You don't have to complete a certain number of levels or collect a certain number of stars to unlock new episodes or areas in the game. You can play any level you want, even the ones that are not yet released in the official version of the game.</li>
|
14 |
-
<li><b>Connect to Facebook:</b> You can connect to Facebook and play with your friends, compare your scores, send and receive lives and boosters, and join groups and tournaments. You can also sync your progress across different devices and platforms.</li>
|
15 |
-
</ul>
|
16 |
-
<p>As you can see, playing Farm Heroes Saga mod APK Facebook connect can make the game more enjoyable and easier for you. However, you should also be aware of some risks and drawbacks of playing with a modded version of the game, which we will discuss later in this article.</p>
|
17 |
-
<p>farm heroes saga unlimited lives and boosters apk<br />
|
18 |
-
farm heroes saga mod apk latest version download<br />
|
19 |
-
farm heroes saga hack apk with facebook login<br />
|
20 |
-
farm heroes saga apk mod unlimited everything<br />
|
21 |
-
farm heroes saga modded apk free download<br />
|
22 |
-
farm heroes saga cheats apk for android<br />
|
23 |
-
farm heroes saga mod apk offline play<br />
|
24 |
-
farm heroes saga cracked apk with facebook sync<br />
|
25 |
-
farm heroes saga hack tool apk no root<br />
|
26 |
-
farm heroes saga premium apk with facebook connect<br />
|
27 |
-
farm heroes saga mod apk unlimited gold bars<br />
|
28 |
-
farm heroes saga full unlocked apk download<br />
|
29 |
-
farm heroes saga mega mod apk with facebook support<br />
|
30 |
-
farm heroes saga pro apk free download for android<br />
|
31 |
-
farm heroes saga mod apk no ads and no survey<br />
|
32 |
-
farm heroes saga patched apk with facebook integration<br />
|
33 |
-
farm heroes saga unlimited moves and magic beans apk<br />
|
34 |
-
farm heroes saga mod apk all levels unlocked<br />
|
35 |
-
farm heroes saga hack version apk with facebook link<br />
|
36 |
-
farm heroes saga vip mod apk download for android<br />
|
37 |
-
farm heroes saga mod apk unlimited boosters and lives<br />
|
38 |
-
farm heroes saga updated mod apk with facebook access<br />
|
39 |
-
farm heroes saga hack online apk no verification<br />
|
40 |
-
farm heroes saga modded game apk for android<br />
|
41 |
-
farm heroes saga cheat engine apk with facebook connect<br />
|
42 |
-
farm heroes saga mod apk unlimited coins and stars<br />
|
43 |
-
farm heroes saga latest hack apk download free<br />
|
44 |
-
farm heroes saga mod menu apk with facebook login<br />
|
45 |
-
farm heroes saga unlimited money and gems apk<br />
|
46 |
-
farm heroes saga modded app apk for android devices<br />
|
47 |
-
farm heroes saga hacked version apk with facebook sync<br />
|
48 |
-
farm heroes saga modded game download with facebook connect<br />
|
49 |
-
farm heroes saga unlimited resources and power ups apk<br />
|
50 |
-
farm heroes saga modded app download for android phone<br />
|
51 |
-
farm heroes saga hacked game apk with facebook support<br />
|
52 |
-
farm heroes saga modded game free download with facebook integration<br />
|
53 |
-
farm heroes saga unlimited items and rewards apk<br />
|
54 |
-
farm heroes saga hacked app download for android device<br />
|
55 |
-
farm heroes saga modded game online with facebook link<br />
|
56 |
-
farm heroes saga hacked game free download with facebook access</p>
|
57 |
-
<h3>How to Download and Install Farm Heroes Saga Mod APK Facebook Connect</h3>
|
58 |
-
<p>If you want to play Farm Heroes Saga mod APK Facebook connect, you need to download and install the modded version of the game from a reliable source. There are many websites that offer mod APKs for various games, but not all of them are safe and trustworthy. Some of them may contain malware, viruses, or spyware that can harm your device or steal your personal information. Therefore, you should be careful when choosing where to download and install the modded version of the game.</p>
|
59 |
-
<p>One of the websites that we recommend for downloading and installing Farm Heroes Saga mod APK Facebook connect is [ModAPKStore]. This website provides high-quality mod APKs for various games, including Farm Heroes Saga. The mod APKs on this website are tested and verified by the developers and users, so you can be sure that they are safe and working. The website also updates the mod APKs regularly to keep up with the latest versions of the games.</p>
|
60 |
-
<p>To download and install Farm Heroes Saga mod APK Facebook connect from ModAPKStore, follow these steps:</p>
|
61 |
-
<ol>
|
62 |
-
<li>Go to [ModAPKStore] and search for Farm Heroes Saga mod APK.</li>
|
63 |
-
<li>Select the latest version of the mod APK from the list of results and click on the download button.</li>
|
64 |
-
<li>Wait for the download to finish and then locate the downloaded file on your device.</li>
|
65 |
-
<li>Before installing the mod APK, make sure that you have enabled the installation of apps from unknown sources on your device. To do this, go to Settings > Security > Unknown Sources and toggle it on.</li>
|
66 |
-
<li>Tap on the downloaded file and follow the instructions to install the mod APK on your device.</li>
|
67 |
-
<li>Once the installation is complete, launch the game and enjoy playing with unlimited lives and boosters.</li>
|
68 |
-
</ol>
|
69 |
-
<h4>How to Connect to Facebook with Farm Heroes Saga Mod APK</h4>
|
70 |
-
<p>One of the challenges of playing Farm Heroes Saga mod APK Facebook connect is that you may not be able to connect to Facebook with your real account. This is because Facebook may detect that you are using a modded version of the game and suspend or ban your account for violating their terms of service. Therefore, you should be careful when connecting to Facebook with a modded version of the game.</p>
|
71 |
-
<p>There are two ways to connect to Facebook with Farm Heroes Saga mod APK: using a fake Facebook account or using a third-party app. Here are the pros and cons of each method:</p>
|
72 |
-
<table>
|
73 |
-
<tr><th>Method</th><th>Pros</th><th>Cons</th></tr>
|
74 |
-
<tr><td>Using a fake Facebook account</td><td>- You can create a new account with a different name and email address.<br>- You can use this account only for playing Farm Heroes Saga mod APK.<br>- You can avoid risking your real account from being suspended or banned.</td><td>- You may not be able to play with your real friends who use their real accounts.<br>- You may lose your progress if your fake account gets suspended or banned.<br>- You may violate Facebook's terms of service by creating a fake account.</td></tr>
|
75 |
-
<tr><td>Using a third-party app</td><td>- You can use your real account to connect to Facebook.<br>- You can play with your real friends who use their real accounts.<br>- You can sync your progress across different devices and platforms.</td><td>- You may need to download and install another app on your device.<br>- You may expose your personal information to a third-party app that may not be secure or trustworthy.<br>- You may still risk your real account from being suspended or banned.</td></tr>
|
76 |
-
</table>
|
77 |
-
<p>The choice is up to you, but we suggest that you use a fake Facebook account for playing Farm Heroes Saga mod APK Facebook connect, as it is safer and easier than using a third-party app. Here are the steps to create and use a fake Facebook account for playing Farm Heroes Saga mod APK Facebook connect:</p>
|
78 |
-
<ol>
|
79 |
-
<li>Go to [Facebook] and create a new account with a different name and email address. You can use a temporary email service like [TempMail] to generate a disposable email address.</li>
|
80 |
-
<li>Verify your email address and complete your profile with some basic information and a profile picture. You can use a random name generator like [FakeNameGenerator] and a random image generator like [ThisPersonDoesNotExist] to create a fake identity and a fake photo.</li>
|
81 |
-
<li>Launch Farm Heroes Saga mod APK on your device and tap on the Connect to Facebook button.</li>
|
82 |
-
<li>Enter your fake Facebook account credentials and allow the game to access your account.</li>
|
83 |
-
<li>Enjoy playing Farm Heroes Saga mod APK Facebook connect with unlimited lives and boosters.</li>
|
84 |
-
</ol>
|
85 |
-
<p>Note: You should not use your fake Facebook account for any other purpose than playing Farm Heroes Saga mod APK. You should also not add any real friends or join any real groups or pages with your fake account, as this may raise suspicion and get your account suspended or banned.</p>
|
86 |
-
<h3>Tips and Tricks for Playing Farm Heroes Saga Mod APK Facebook Connect</h3>
|
87 |
-
<p>Playing Farm Heroes Saga mod APK Facebook connect can be very fun and rewarding, but it can also be challenging and frustrating at times. To help you get the most out of your gaming experience, here are some tips and tricks for playing Farm Heroes Saga mod APK Facebook connect:</p>
|
88 |
-
<ul>
|
89 |
-
<li><b>Use boosters wisely:</b> Boosters are special items that can help you match more cropsies or clear obstacles in the game. You can use boosters before or during a level, depending on the type of booster. Some of the boosters you can use are: <ul>
|
90 |
-
<li><b>Shovel:</b> This booster allows you to dig up one crop or obstacle on the board. You can use it before or during a level.</li>
|
91 |
-
<li><b>Tractor:</b> This booster allows you to clear one row of crops or obstacles on the board. You can use it before or during a level.</li>
|
92 |
-
<li><b>Dog:</b> This booster allows you to collect all crops of one type on the board. You can use it before or during a level.</li>
|
93 |
-
<li><b>Color Collector:</b> This booster allows you to collect all crops of one color on the board. You can use it before or during a level.</li>
|
94 |
-
<li><b>Magic Beans:</b> This booster allows you to activate the Hero Mode, which gives you extra points for matching crops after you complete the level goal. You can use it before a level.</li>
|
95 |
-
</ul>
|
96 |
-
You can get more boosters by opening chests, completing quests, or buying them with gold bars. However, you should not waste your boosters on easy levels or when you don't need them. Save them for hard levels or when you are stuck.</li>
|
97 |
-
<li><b>Collect more cropsies:</b> Cropsies are the fruits and vegetables that you have to match and collect in the game. The more cropsies you collect, the more points you get and the faster you progress in the game. To collect more cropsies, you should: <ul>
|
98 |
-
<li><b>Match four or more cropsies:</b> When you match four or more cropsies of the same type, you create a super crop, which has more value than a regular crop. For example, matching four strawberries creates a super strawberry, which is worth two regular strawberries. Matching five strawberries creates a mega strawberry, which is worth five regular strawberries.</li>
|
99 |
-
<li><b>Match cropsies in T or L shapes:</b> When you match cropsies in T or L shapes, you create a special crop, which has a special effect when matched with other crops of the same type. For example, matching cropsies in a T shape creates a water drop, which clears all crops of one type on the board when matched with another water drop.</li>
|
100 |
-
<li><b>Match cropsies near grumpy cropsies:</b> Grumpy cropsies are cropsies that have an angry face and are worth zero points. They are created by mud, ice, or other obstacles on the board. To turn them into happy cropsies, you have to match them with other crops of the same type near them.</li>
|
101 |
-
</ul></li>
|
102 |
-
<li><b>Beat challenging levels:</b> Some levels in Farm Heroes Saga mod APK Facebook connect are harder than others, as they have more obstacles, less moves, or higher goals. To beat these levels, you should: <ul>
|
103 |
-
<li><b>Plan your moves:</b> Before you make a move, look at the board and see if you can make a better move elsewhere. Try to match cropsies that are required for the level goal, create super or special crops, or clear obstacles. Avoid making moves that do not help you achieve the goal or create grumpy cropsies.</li>
|
104 |
-
<li><b>Use boosters strategically:</b> If you have boosters, use them when you need them most, such as when you are running out of moves, when you are stuck, or when you are close to completing the goal. Do not use boosters randomly or unnecessarily, as they may not help you much or may even make the level harder.</li>
|
105 |
-
<li><b>Replay levels:</b> If you fail to complete a level, do not give up. You can replay the level as many times as you want until you beat it. Each time you replay a level, the board layout and the cropsies distribution may change, so you may have a better chance of winning. You can also learn from your mistakes and try a different strategy or approach.</li>
|
106 |
-
</ul></li>
|
107 |
-
</ul>
|
108 |
-
<h4>How to Update Farm Heroes Saga Mod APK Facebook Connect</h4>
|
109 |
-
<p>Another challenge of playing Farm Heroes Saga mod APK Facebook connect is that you may not be able to update the game when a new version is released. This is because the modded version of the game may not be compatible with the latest version of the game or may not be updated by the modder in time. Therefore, you should check regularly if there is a new version of the modded game available and how to update it.</p>
|
110 |
-
<p>There are two ways to update Farm Heroes Saga mod APK Facebook connect: downloading it again or using an auto-update feature. Here are the pros and cons of each method:</p>
|
111 |
-
<table>
|
112 |
-
<tr><th>Method</th><th>Pros</th><th>Cons</th></tr>
|
113 |
-
<tr><td>Downloading it again</td><td>- You can get the latest version of the modded game with new features and improvements.<br>- You can choose which version of the modded game you want to download and install.</td><td>- You may need to uninstall the previous version of the modded game and lose your progress.<br>- You may need to download and install the modded game from a different source if the original one is not updated.<br>- You may expose your device to malware or viruses if you download and install the modded game from an untrusted source.</td></tr>
|
114 |
-
<tr><td>Using an auto-update feature</td><td>- You can update the modded game automatically without uninstalling it or losing your progress.<br>- You can save time and effort by not having to download and install the modded game manually.</td><td>- You may not be able to choose which version of the modded game you want to update to.<br>- You may encounter errors or bugs if the auto-update feature is not working properly.<br>- You may depend on the modder to update the modded game regularly and timely.</td></tr>
|
115 |
-
</table>
|
116 |
-
<p>The choice is up to you, but we suggest that you use an auto-update feature for updating Farm Heroes Saga mod APK Facebook connect, as it is more convenient and safer than downloading it again. However, you should make sure that the modded game has an auto-update feature and that it is working properly. Here are the steps to use an auto-update feature for updating Farm Heroes Saga mod APK Facebook connect:</p>
|
117 |
-
<ol>
|
118 |
-
<li>Launch Farm Heroes Saga mod APK on your device and go to the settings menu.</li>
|
119 |
-
<li>Look for an option that says "Auto-update" or "Check for updates" and toggle it on.</li>
|
120 |
-
<li>Wait for the modded game to check for updates and download them if available.</li>
|
121 |
-
<li>Restart the game and enjoy playing with unlimited lives and boosters.</li>
|
122 |
-
</ol>
|
123 |
-
<h2>Risks and Drawbacks of Playing Farm Heroes Saga Mod APK Facebook Connect</h2>
|
124 |
-
<p>While playing Farm Heroes Saga mod APK Facebook connect can be very fun and rewarding, it can also have some risks and drawbacks that you should be aware of before playing. Some of these risks and drawbacks are:</p>
|
125 |
-
<ul>
|
126 |
-
<li><b>Possible malware:</b> As we mentioned earlier, not all websites that offer mod APKs for various games are safe and trustworthy. Some of them may contain malware, viruses, or spyware that can harm your device or steal your personal information. Therefore, you should be careful when choosing where to download and install the modded version of the game.</li>
|
127 |
-
<li><b>Account suspension:</b> Another risk of playing Farm Heroes Saga mod APK Facebook connect is that your account may be suspended or banned by Facebook or King for violating their terms of service. This is because they may detect that you are using a modded version of the game and consider it as cheating or hacking. Therefore, you should be careful when connecting to Facebook with a modded version of the game, as we explained earlier.</li>
|
128 |
-
<li><b>Data loss:</b> Another drawback of playing Farm Heroes Saga mod APK Facebook connect is that you may lose your progress or data if something goes wrong with the modded version of the game. For example, if the modded version of the game crashes, freezes, or stops working, you may not be able to resume your game or recover your data. Therefore, you should backup your data regularly and avoid relying on the modded version of the game for your gaming experience.</li>
|
129 |
-
<li><b>Ethical issues:</b> Finally, playing Farm Heroes Saga mod APK Facebook connect may raise some ethical issues, as you may be unfair to other players who play the official version of the game. You may also be disrespecting the developers and publishers of the game, who put a lot of time and effort into creating and maintaining the game. Therefore, you should respect the rules and regulations of the game and appreciate the work of the creators.</li>
|
130 |
-
</ul>
|
131 |
-
<p>As you can see, playing Farm Heroes Saga mod APK Facebook connect can have some risks and drawbacks that may outweigh the benefits. Therefore, you should think twice before playing with a modded version of the game and consider some alternatives that are safer and more ethical.</p>
|
132 |
-
<h3>How to Play Farm Heroes Saga Safely and Legally</h3>
|
133 |
-
<p>If you want to play Farm Heroes Saga safely and legally, you should play the official version of the game that is available on Google Play Store or App Store. The official version of the game is free to download and play, and it offers a lot of fun and challenging features that can keep you entertained for hours. Some of these features are:</p>
|
134 |
-
<ul>
|
135 |
-
<li><b>New levels every week:</b> The official version of the game is updated regularly with new levels and episodes that offer new goals and challenges. You can play hundreds of levels, each with different cropsies, obstacles, and boosters.</li>
|
136 |
-
<li><b>Special events and quests:</b> The official version of the game also offers special events and quests that give you extra rewards and opportunities to play. You can join seasonal events, daily quests, leaderboards, tournaments, and more.</li>
|
137 |
-
<li><b>Legitimate cheats and hacks:</b> The official version of the game also allows you to use some legitimate cheats and hacks that can help you beat difficult levels and progress faster in the game. Some of these cheats and hacks are: <ul>
|
138 |
-
<li><b>Time lapse:</b> This cheat allows you to refill your lives faster by changing the time on your device. To use this cheat, you have to exit the game, go to your device settings, and move the time forward by a few hours. Then, go back to the game and see your lives refilled.</li>
|
139 |
-
<li><b>Free boosters:</b> This hack allows you to get free boosters by watching ads or completing surveys. To use this hack, you have to go to the shop menu in the game and look for an option that says "Watch video for free boosters" or "Complete survey for free boosters". Then, follow the instructions and get your free boosters.</li>
|
140 |
-
<li><b>Free gold bars:</b> This hack allows you to get free gold bars by inviting your friends to play the game or by using a referral code. To use this hack, you have to go to the settings menu in the game and look for an option that says "Invite friends" or "Enter referral code". Then, follow the instructions and get your free gold bars.</li>
|
141 |
-
</ul></li>
|
142 |
-
<li><b>Other similar games:</b> If you want to play other games that are similar to Farm Heroes Saga, you can try some of these games that are also available on Google Play Store or App Store: <ul>
|
143 |
-
<li><b>Candy Crush Saga:</b> This is another match-3 puzzle game developed by King, where you have to match candies to clear levels and save Candy Kingdom from the evil Tiffi and Mr. Toffee.</li>
|
144 |
-
<li><b>Gardenscapes:</b> This is a match-3 puzzle game developed by Playrix, where you have to match fruits and flowers to restore a beautiful garden and uncover its secrets.</li>
|
145 |
-
<li><b>FarmVille 2: Country Escape:</b> This is a farming simulation game developed by Zynga, where you have to build your own farm, grow crops, raise animals, and trade with other players.</li>
|
146 |
-
</ul></li>
|
147 |
-
</ul>
|
148 |
-
<h2>Conclusion</h2>
|
149 |
-
<p>In conclusion, Farm Heroes Saga mod APK Facebook connect is a way to play Farm Heroes Saga with unlimited lives and boosters, as well as connect to Facebook and play with your friends. However, it also has some risks and drawbacks, such as possible malware, account suspension, data loss, and ethical issues. Therefore, you should be careful when playing with a modded version of the game and consider some alternatives that are safer and more ethical, such as playing the official version of the game, using legitimate cheats and hacks, or playing other similar games. We hope that this article has helped you understand how to play Farm Heroes Saga mod APK Facebook connect and enjoy the game. If you have any questions or comments, please feel free to share them in the comments section below.</p>
|
150 |
-
<h2>FAQs</h2>
|
151 |
-
<p>Here are some frequently asked questions about Farm Heroes Saga mod APK Facebook connect:</p>
|
152 |
-
<ol>
|
153 |
-
<li><b>What is Farm Heroes Saga?</b><br>Farm Heroes Saga is a match-3 puzzle game developed by King, where you have to match cropsies (fruits and vegetables) to collect them and save the farm from the evil Rancid the Racoon.</li>
|
154 |
-
<li><b>What is a mod APK?</b><br>A mod APK is a modified version of an Android application that offers some advantages over the original one, such as unlimited resources, unlocked features, or removed ads.</li>
|
155 |
-
<li><b>How to play Farm Heroes Saga mod APK Facebook connect?</b><br>To play Farm Heroes Saga mod APK Facebook connect, you need to download and install the modded version of the game from a reliable source, such as [ModAPKStore]. Then, you need to connect to Facebook with either a fake account or a third-party app.</li>
|
156 |
-
<li><b>What are the benefits of playing Farm Heroes Saga mod APK Facebook connect?</b><br>Some of the benefits of playing Farm Heroes Saga mod APK Facebook connect are unlimited lives, boosters, gold bars, and access to all levels. You can also play with your friends on Facebook and sync your progress across different devices and platforms.</li>
|
157 |
-
<li><b>What are the risks and drawbacks of playing Farm Heroes Saga mod APK Facebook connect?</b><br>Some of the risks and drawbacks of playing Farm Heroes Saga mod APK Facebook connect are possible malware, account suspension, data loss, and ethical issues. You may also not be able to update the game when a new version is released.</li>
|
158 |
-
</ol></p> 401be4b1e0<br />
|
159 |
-
<br />
|
160 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/4Taps/SadTalker/src/face3d/models/arcface_torch/configs/__init__.py
DELETED
File without changes
|
spaces/4Taps/SadTalker/src/face3d/models/networks.py
DELETED
@@ -1,521 +0,0 @@
|
|
1 |
-
"""This script defines deep neural networks for Deep3DFaceRecon_pytorch
|
2 |
-
"""
|
3 |
-
|
4 |
-
import os
|
5 |
-
import numpy as np
|
6 |
-
import torch.nn.functional as F
|
7 |
-
from torch.nn import init
|
8 |
-
import functools
|
9 |
-
from torch.optim import lr_scheduler
|
10 |
-
import torch
|
11 |
-
from torch import Tensor
|
12 |
-
import torch.nn as nn
|
13 |
-
try:
|
14 |
-
from torch.hub import load_state_dict_from_url
|
15 |
-
except ImportError:
|
16 |
-
from torch.utils.model_zoo import load_url as load_state_dict_from_url
|
17 |
-
from typing import Type, Any, Callable, Union, List, Optional
|
18 |
-
from .arcface_torch.backbones import get_model
|
19 |
-
from kornia.geometry import warp_affine
|
20 |
-
|
21 |
-
def resize_n_crop(image, M, dsize=112):
|
22 |
-
# image: (b, c, h, w)
|
23 |
-
# M : (b, 2, 3)
|
24 |
-
return warp_affine(image, M, dsize=(dsize, dsize), align_corners=True)
|
25 |
-
|
26 |
-
def filter_state_dict(state_dict, remove_name='fc'):
|
27 |
-
new_state_dict = {}
|
28 |
-
for key in state_dict:
|
29 |
-
if remove_name in key:
|
30 |
-
continue
|
31 |
-
new_state_dict[key] = state_dict[key]
|
32 |
-
return new_state_dict
|
33 |
-
|
34 |
-
def get_scheduler(optimizer, opt):
|
35 |
-
"""Return a learning rate scheduler
|
36 |
-
|
37 |
-
Parameters:
|
38 |
-
optimizer -- the optimizer of the network
|
39 |
-
opt (option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions.
|
40 |
-
opt.lr_policy is the name of learning rate policy: linear | step | plateau | cosine
|
41 |
-
|
42 |
-
For other schedulers (step, plateau, and cosine), we use the default PyTorch schedulers.
|
43 |
-
See https://pytorch.org/docs/stable/optim.html for more details.
|
44 |
-
"""
|
45 |
-
if opt.lr_policy == 'linear':
|
46 |
-
def lambda_rule(epoch):
|
47 |
-
lr_l = 1.0 - max(0, epoch + opt.epoch_count - opt.n_epochs) / float(opt.n_epochs + 1)
|
48 |
-
return lr_l
|
49 |
-
scheduler = lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
|
50 |
-
elif opt.lr_policy == 'step':
|
51 |
-
scheduler = lr_scheduler.StepLR(optimizer, step_size=opt.lr_decay_epochs, gamma=0.2)
|
52 |
-
elif opt.lr_policy == 'plateau':
|
53 |
-
scheduler = lr_scheduler.ReduceLROnPlateau(optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
|
54 |
-
elif opt.lr_policy == 'cosine':
|
55 |
-
scheduler = lr_scheduler.CosineAnnealingLR(optimizer, T_max=opt.n_epochs, eta_min=0)
|
56 |
-
else:
|
57 |
-
return NotImplementedError('learning rate policy [%s] is not implemented', opt.lr_policy)
|
58 |
-
return scheduler
|
59 |
-
|
60 |
-
|
61 |
-
def define_net_recon(net_recon, use_last_fc=False, init_path=None):
|
62 |
-
return ReconNetWrapper(net_recon, use_last_fc=use_last_fc, init_path=init_path)
|
63 |
-
|
64 |
-
def define_net_recog(net_recog, pretrained_path=None):
|
65 |
-
net = RecogNetWrapper(net_recog=net_recog, pretrained_path=pretrained_path)
|
66 |
-
net.eval()
|
67 |
-
return net
|
68 |
-
|
69 |
-
class ReconNetWrapper(nn.Module):
|
70 |
-
fc_dim=257
|
71 |
-
def __init__(self, net_recon, use_last_fc=False, init_path=None):
|
72 |
-
super(ReconNetWrapper, self).__init__()
|
73 |
-
self.use_last_fc = use_last_fc
|
74 |
-
if net_recon not in func_dict:
|
75 |
-
return NotImplementedError('network [%s] is not implemented', net_recon)
|
76 |
-
func, last_dim = func_dict[net_recon]
|
77 |
-
backbone = func(use_last_fc=use_last_fc, num_classes=self.fc_dim)
|
78 |
-
if init_path and os.path.isfile(init_path):
|
79 |
-
state_dict = filter_state_dict(torch.load(init_path, map_location='cpu'))
|
80 |
-
backbone.load_state_dict(state_dict)
|
81 |
-
print("loading init net_recon %s from %s" %(net_recon, init_path))
|
82 |
-
self.backbone = backbone
|
83 |
-
if not use_last_fc:
|
84 |
-
self.final_layers = nn.ModuleList([
|
85 |
-
conv1x1(last_dim, 80, bias=True), # id layer
|
86 |
-
conv1x1(last_dim, 64, bias=True), # exp layer
|
87 |
-
conv1x1(last_dim, 80, bias=True), # tex layer
|
88 |
-
conv1x1(last_dim, 3, bias=True), # angle layer
|
89 |
-
conv1x1(last_dim, 27, bias=True), # gamma layer
|
90 |
-
conv1x1(last_dim, 2, bias=True), # tx, ty
|
91 |
-
conv1x1(last_dim, 1, bias=True) # tz
|
92 |
-
])
|
93 |
-
for m in self.final_layers:
|
94 |
-
nn.init.constant_(m.weight, 0.)
|
95 |
-
nn.init.constant_(m.bias, 0.)
|
96 |
-
|
97 |
-
def forward(self, x):
|
98 |
-
x = self.backbone(x)
|
99 |
-
if not self.use_last_fc:
|
100 |
-
output = []
|
101 |
-
for layer in self.final_layers:
|
102 |
-
output.append(layer(x))
|
103 |
-
x = torch.flatten(torch.cat(output, dim=1), 1)
|
104 |
-
return x
|
105 |
-
|
106 |
-
|
107 |
-
class RecogNetWrapper(nn.Module):
|
108 |
-
def __init__(self, net_recog, pretrained_path=None, input_size=112):
|
109 |
-
super(RecogNetWrapper, self).__init__()
|
110 |
-
net = get_model(name=net_recog, fp16=False)
|
111 |
-
if pretrained_path:
|
112 |
-
state_dict = torch.load(pretrained_path, map_location='cpu')
|
113 |
-
net.load_state_dict(state_dict)
|
114 |
-
print("loading pretrained net_recog %s from %s" %(net_recog, pretrained_path))
|
115 |
-
for param in net.parameters():
|
116 |
-
param.requires_grad = False
|
117 |
-
self.net = net
|
118 |
-
self.preprocess = lambda x: 2 * x - 1
|
119 |
-
self.input_size=input_size
|
120 |
-
|
121 |
-
def forward(self, image, M):
|
122 |
-
image = self.preprocess(resize_n_crop(image, M, self.input_size))
|
123 |
-
id_feature = F.normalize(self.net(image), dim=-1, p=2)
|
124 |
-
return id_feature
|
125 |
-
|
126 |
-
|
127 |
-
# adapted from https://github.com/pytorch/vision/edit/master/torchvision/models/resnet.py
|
128 |
-
__all__ = ['ResNet', 'resnet18', 'resnet34', 'resnet50', 'resnet101',
|
129 |
-
'resnet152', 'resnext50_32x4d', 'resnext101_32x8d',
|
130 |
-
'wide_resnet50_2', 'wide_resnet101_2']
|
131 |
-
|
132 |
-
|
133 |
-
model_urls = {
|
134 |
-
'resnet18': 'https://download.pytorch.org/models/resnet18-f37072fd.pth',
|
135 |
-
'resnet34': 'https://download.pytorch.org/models/resnet34-b627a593.pth',
|
136 |
-
'resnet50': 'https://download.pytorch.org/models/resnet50-0676ba61.pth',
|
137 |
-
'resnet101': 'https://download.pytorch.org/models/resnet101-63fe2227.pth',
|
138 |
-
'resnet152': 'https://download.pytorch.org/models/resnet152-394f9c45.pth',
|
139 |
-
'resnext50_32x4d': 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',
|
140 |
-
'resnext101_32x8d': 'https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth',
|
141 |
-
'wide_resnet50_2': 'https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth',
|
142 |
-
'wide_resnet101_2': 'https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth',
|
143 |
-
}
|
144 |
-
|
145 |
-
|
146 |
-
def conv3x3(in_planes: int, out_planes: int, stride: int = 1, groups: int = 1, dilation: int = 1) -> nn.Conv2d:
|
147 |
-
"""3x3 convolution with padding"""
|
148 |
-
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
|
149 |
-
padding=dilation, groups=groups, bias=False, dilation=dilation)
|
150 |
-
|
151 |
-
|
152 |
-
def conv1x1(in_planes: int, out_planes: int, stride: int = 1, bias: bool = False) -> nn.Conv2d:
|
153 |
-
"""1x1 convolution"""
|
154 |
-
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=bias)
|
155 |
-
|
156 |
-
|
157 |
-
class BasicBlock(nn.Module):
|
158 |
-
expansion: int = 1
|
159 |
-
|
160 |
-
def __init__(
|
161 |
-
self,
|
162 |
-
inplanes: int,
|
163 |
-
planes: int,
|
164 |
-
stride: int = 1,
|
165 |
-
downsample: Optional[nn.Module] = None,
|
166 |
-
groups: int = 1,
|
167 |
-
base_width: int = 64,
|
168 |
-
dilation: int = 1,
|
169 |
-
norm_layer: Optional[Callable[..., nn.Module]] = None
|
170 |
-
) -> None:
|
171 |
-
super(BasicBlock, self).__init__()
|
172 |
-
if norm_layer is None:
|
173 |
-
norm_layer = nn.BatchNorm2d
|
174 |
-
if groups != 1 or base_width != 64:
|
175 |
-
raise ValueError('BasicBlock only supports groups=1 and base_width=64')
|
176 |
-
if dilation > 1:
|
177 |
-
raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
|
178 |
-
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
|
179 |
-
self.conv1 = conv3x3(inplanes, planes, stride)
|
180 |
-
self.bn1 = norm_layer(planes)
|
181 |
-
self.relu = nn.ReLU(inplace=True)
|
182 |
-
self.conv2 = conv3x3(planes, planes)
|
183 |
-
self.bn2 = norm_layer(planes)
|
184 |
-
self.downsample = downsample
|
185 |
-
self.stride = stride
|
186 |
-
|
187 |
-
def forward(self, x: Tensor) -> Tensor:
|
188 |
-
identity = x
|
189 |
-
|
190 |
-
out = self.conv1(x)
|
191 |
-
out = self.bn1(out)
|
192 |
-
out = self.relu(out)
|
193 |
-
|
194 |
-
out = self.conv2(out)
|
195 |
-
out = self.bn2(out)
|
196 |
-
|
197 |
-
if self.downsample is not None:
|
198 |
-
identity = self.downsample(x)
|
199 |
-
|
200 |
-
out += identity
|
201 |
-
out = self.relu(out)
|
202 |
-
|
203 |
-
return out
|
204 |
-
|
205 |
-
|
206 |
-
class Bottleneck(nn.Module):
|
207 |
-
# Bottleneck in torchvision places the stride for downsampling at 3x3 convolution(self.conv2)
|
208 |
-
# while original implementation places the stride at the first 1x1 convolution(self.conv1)
|
209 |
-
# according to "Deep residual learning for image recognition"https://arxiv.org/abs/1512.03385.
|
210 |
-
# This variant is also known as ResNet V1.5 and improves accuracy according to
|
211 |
-
# https://ngc.nvidia.com/catalog/model-scripts/nvidia:resnet_50_v1_5_for_pytorch.
|
212 |
-
|
213 |
-
expansion: int = 4
|
214 |
-
|
215 |
-
def __init__(
|
216 |
-
self,
|
217 |
-
inplanes: int,
|
218 |
-
planes: int,
|
219 |
-
stride: int = 1,
|
220 |
-
downsample: Optional[nn.Module] = None,
|
221 |
-
groups: int = 1,
|
222 |
-
base_width: int = 64,
|
223 |
-
dilation: int = 1,
|
224 |
-
norm_layer: Optional[Callable[..., nn.Module]] = None
|
225 |
-
) -> None:
|
226 |
-
super(Bottleneck, self).__init__()
|
227 |
-
if norm_layer is None:
|
228 |
-
norm_layer = nn.BatchNorm2d
|
229 |
-
width = int(planes * (base_width / 64.)) * groups
|
230 |
-
# Both self.conv2 and self.downsample layers downsample the input when stride != 1
|
231 |
-
self.conv1 = conv1x1(inplanes, width)
|
232 |
-
self.bn1 = norm_layer(width)
|
233 |
-
self.conv2 = conv3x3(width, width, stride, groups, dilation)
|
234 |
-
self.bn2 = norm_layer(width)
|
235 |
-
self.conv3 = conv1x1(width, planes * self.expansion)
|
236 |
-
self.bn3 = norm_layer(planes * self.expansion)
|
237 |
-
self.relu = nn.ReLU(inplace=True)
|
238 |
-
self.downsample = downsample
|
239 |
-
self.stride = stride
|
240 |
-
|
241 |
-
def forward(self, x: Tensor) -> Tensor:
|
242 |
-
identity = x
|
243 |
-
|
244 |
-
out = self.conv1(x)
|
245 |
-
out = self.bn1(out)
|
246 |
-
out = self.relu(out)
|
247 |
-
|
248 |
-
out = self.conv2(out)
|
249 |
-
out = self.bn2(out)
|
250 |
-
out = self.relu(out)
|
251 |
-
|
252 |
-
out = self.conv3(out)
|
253 |
-
out = self.bn3(out)
|
254 |
-
|
255 |
-
if self.downsample is not None:
|
256 |
-
identity = self.downsample(x)
|
257 |
-
|
258 |
-
out += identity
|
259 |
-
out = self.relu(out)
|
260 |
-
|
261 |
-
return out
|
262 |
-
|
263 |
-
|
264 |
-
class ResNet(nn.Module):
|
265 |
-
|
266 |
-
def __init__(
|
267 |
-
self,
|
268 |
-
block: Type[Union[BasicBlock, Bottleneck]],
|
269 |
-
layers: List[int],
|
270 |
-
num_classes: int = 1000,
|
271 |
-
zero_init_residual: bool = False,
|
272 |
-
use_last_fc: bool = False,
|
273 |
-
groups: int = 1,
|
274 |
-
width_per_group: int = 64,
|
275 |
-
replace_stride_with_dilation: Optional[List[bool]] = None,
|
276 |
-
norm_layer: Optional[Callable[..., nn.Module]] = None
|
277 |
-
) -> None:
|
278 |
-
super(ResNet, self).__init__()
|
279 |
-
if norm_layer is None:
|
280 |
-
norm_layer = nn.BatchNorm2d
|
281 |
-
self._norm_layer = norm_layer
|
282 |
-
|
283 |
-
self.inplanes = 64
|
284 |
-
self.dilation = 1
|
285 |
-
if replace_stride_with_dilation is None:
|
286 |
-
# each element in the tuple indicates if we should replace
|
287 |
-
# the 2x2 stride with a dilated convolution instead
|
288 |
-
replace_stride_with_dilation = [False, False, False]
|
289 |
-
if len(replace_stride_with_dilation) != 3:
|
290 |
-
raise ValueError("replace_stride_with_dilation should be None "
|
291 |
-
"or a 3-element tuple, got {}".format(replace_stride_with_dilation))
|
292 |
-
self.use_last_fc = use_last_fc
|
293 |
-
self.groups = groups
|
294 |
-
self.base_width = width_per_group
|
295 |
-
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3,
|
296 |
-
bias=False)
|
297 |
-
self.bn1 = norm_layer(self.inplanes)
|
298 |
-
self.relu = nn.ReLU(inplace=True)
|
299 |
-
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
|
300 |
-
self.layer1 = self._make_layer(block, 64, layers[0])
|
301 |
-
self.layer2 = self._make_layer(block, 128, layers[1], stride=2,
|
302 |
-
dilate=replace_stride_with_dilation[0])
|
303 |
-
self.layer3 = self._make_layer(block, 256, layers[2], stride=2,
|
304 |
-
dilate=replace_stride_with_dilation[1])
|
305 |
-
self.layer4 = self._make_layer(block, 512, layers[3], stride=2,
|
306 |
-
dilate=replace_stride_with_dilation[2])
|
307 |
-
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
|
308 |
-
|
309 |
-
if self.use_last_fc:
|
310 |
-
self.fc = nn.Linear(512 * block.expansion, num_classes)
|
311 |
-
|
312 |
-
for m in self.modules():
|
313 |
-
if isinstance(m, nn.Conv2d):
|
314 |
-
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
|
315 |
-
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
|
316 |
-
nn.init.constant_(m.weight, 1)
|
317 |
-
nn.init.constant_(m.bias, 0)
|
318 |
-
|
319 |
-
|
320 |
-
|
321 |
-
# Zero-initialize the last BN in each residual branch,
|
322 |
-
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
|
323 |
-
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
|
324 |
-
if zero_init_residual:
|
325 |
-
for m in self.modules():
|
326 |
-
if isinstance(m, Bottleneck):
|
327 |
-
nn.init.constant_(m.bn3.weight, 0) # type: ignore[arg-type]
|
328 |
-
elif isinstance(m, BasicBlock):
|
329 |
-
nn.init.constant_(m.bn2.weight, 0) # type: ignore[arg-type]
|
330 |
-
|
331 |
-
def _make_layer(self, block: Type[Union[BasicBlock, Bottleneck]], planes: int, blocks: int,
|
332 |
-
stride: int = 1, dilate: bool = False) -> nn.Sequential:
|
333 |
-
norm_layer = self._norm_layer
|
334 |
-
downsample = None
|
335 |
-
previous_dilation = self.dilation
|
336 |
-
if dilate:
|
337 |
-
self.dilation *= stride
|
338 |
-
stride = 1
|
339 |
-
if stride != 1 or self.inplanes != planes * block.expansion:
|
340 |
-
downsample = nn.Sequential(
|
341 |
-
conv1x1(self.inplanes, planes * block.expansion, stride),
|
342 |
-
norm_layer(planes * block.expansion),
|
343 |
-
)
|
344 |
-
|
345 |
-
layers = []
|
346 |
-
layers.append(block(self.inplanes, planes, stride, downsample, self.groups,
|
347 |
-
self.base_width, previous_dilation, norm_layer))
|
348 |
-
self.inplanes = planes * block.expansion
|
349 |
-
for _ in range(1, blocks):
|
350 |
-
layers.append(block(self.inplanes, planes, groups=self.groups,
|
351 |
-
base_width=self.base_width, dilation=self.dilation,
|
352 |
-
norm_layer=norm_layer))
|
353 |
-
|
354 |
-
return nn.Sequential(*layers)
|
355 |
-
|
356 |
-
def _forward_impl(self, x: Tensor) -> Tensor:
|
357 |
-
# See note [TorchScript super()]
|
358 |
-
x = self.conv1(x)
|
359 |
-
x = self.bn1(x)
|
360 |
-
x = self.relu(x)
|
361 |
-
x = self.maxpool(x)
|
362 |
-
|
363 |
-
x = self.layer1(x)
|
364 |
-
x = self.layer2(x)
|
365 |
-
x = self.layer3(x)
|
366 |
-
x = self.layer4(x)
|
367 |
-
|
368 |
-
x = self.avgpool(x)
|
369 |
-
if self.use_last_fc:
|
370 |
-
x = torch.flatten(x, 1)
|
371 |
-
x = self.fc(x)
|
372 |
-
return x
|
373 |
-
|
374 |
-
def forward(self, x: Tensor) -> Tensor:
|
375 |
-
return self._forward_impl(x)
|
376 |
-
|
377 |
-
|
378 |
-
def _resnet(
|
379 |
-
arch: str,
|
380 |
-
block: Type[Union[BasicBlock, Bottleneck]],
|
381 |
-
layers: List[int],
|
382 |
-
pretrained: bool,
|
383 |
-
progress: bool,
|
384 |
-
**kwargs: Any
|
385 |
-
) -> ResNet:
|
386 |
-
model = ResNet(block, layers, **kwargs)
|
387 |
-
if pretrained:
|
388 |
-
state_dict = load_state_dict_from_url(model_urls[arch],
|
389 |
-
progress=progress)
|
390 |
-
model.load_state_dict(state_dict)
|
391 |
-
return model
|
392 |
-
|
393 |
-
|
394 |
-
def resnet18(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
395 |
-
r"""ResNet-18 model from
|
396 |
-
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
397 |
-
|
398 |
-
Args:
|
399 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
400 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
401 |
-
"""
|
402 |
-
return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress,
|
403 |
-
**kwargs)
|
404 |
-
|
405 |
-
|
406 |
-
def resnet34(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
407 |
-
r"""ResNet-34 model from
|
408 |
-
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
409 |
-
|
410 |
-
Args:
|
411 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
412 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
413 |
-
"""
|
414 |
-
return _resnet('resnet34', BasicBlock, [3, 4, 6, 3], pretrained, progress,
|
415 |
-
**kwargs)
|
416 |
-
|
417 |
-
|
418 |
-
def resnet50(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
419 |
-
r"""ResNet-50 model from
|
420 |
-
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
421 |
-
|
422 |
-
Args:
|
423 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
424 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
425 |
-
"""
|
426 |
-
return _resnet('resnet50', Bottleneck, [3, 4, 6, 3], pretrained, progress,
|
427 |
-
**kwargs)
|
428 |
-
|
429 |
-
|
430 |
-
def resnet101(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
431 |
-
r"""ResNet-101 model from
|
432 |
-
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
433 |
-
|
434 |
-
Args:
|
435 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
436 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
437 |
-
"""
|
438 |
-
return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress,
|
439 |
-
**kwargs)
|
440 |
-
|
441 |
-
|
442 |
-
def resnet152(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
443 |
-
r"""ResNet-152 model from
|
444 |
-
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_.
|
445 |
-
|
446 |
-
Args:
|
447 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
448 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
449 |
-
"""
|
450 |
-
return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, progress,
|
451 |
-
**kwargs)
|
452 |
-
|
453 |
-
|
454 |
-
def resnext50_32x4d(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
455 |
-
r"""ResNeXt-50 32x4d model from
|
456 |
-
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_.
|
457 |
-
|
458 |
-
Args:
|
459 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
460 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
461 |
-
"""
|
462 |
-
kwargs['groups'] = 32
|
463 |
-
kwargs['width_per_group'] = 4
|
464 |
-
return _resnet('resnext50_32x4d', Bottleneck, [3, 4, 6, 3],
|
465 |
-
pretrained, progress, **kwargs)
|
466 |
-
|
467 |
-
|
468 |
-
def resnext101_32x8d(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
469 |
-
r"""ResNeXt-101 32x8d model from
|
470 |
-
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_.
|
471 |
-
|
472 |
-
Args:
|
473 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
474 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
475 |
-
"""
|
476 |
-
kwargs['groups'] = 32
|
477 |
-
kwargs['width_per_group'] = 8
|
478 |
-
return _resnet('resnext101_32x8d', Bottleneck, [3, 4, 23, 3],
|
479 |
-
pretrained, progress, **kwargs)
|
480 |
-
|
481 |
-
|
482 |
-
def wide_resnet50_2(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
483 |
-
r"""Wide ResNet-50-2 model from
|
484 |
-
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_.
|
485 |
-
|
486 |
-
The model is the same as ResNet except for the bottleneck number of channels
|
487 |
-
which is twice larger in every block. The number of channels in outer 1x1
|
488 |
-
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
|
489 |
-
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
|
490 |
-
|
491 |
-
Args:
|
492 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
493 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
494 |
-
"""
|
495 |
-
kwargs['width_per_group'] = 64 * 2
|
496 |
-
return _resnet('wide_resnet50_2', Bottleneck, [3, 4, 6, 3],
|
497 |
-
pretrained, progress, **kwargs)
|
498 |
-
|
499 |
-
|
500 |
-
def wide_resnet101_2(pretrained: bool = False, progress: bool = True, **kwargs: Any) -> ResNet:
|
501 |
-
r"""Wide ResNet-101-2 model from
|
502 |
-
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_.
|
503 |
-
|
504 |
-
The model is the same as ResNet except for the bottleneck number of channels
|
505 |
-
which is twice larger in every block. The number of channels in outer 1x1
|
506 |
-
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
|
507 |
-
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
|
508 |
-
|
509 |
-
Args:
|
510 |
-
pretrained (bool): If True, returns a model pre-trained on ImageNet
|
511 |
-
progress (bool): If True, displays a progress bar of the download to stderr
|
512 |
-
"""
|
513 |
-
kwargs['width_per_group'] = 64 * 2
|
514 |
-
return _resnet('wide_resnet101_2', Bottleneck, [3, 4, 23, 3],
|
515 |
-
pretrained, progress, **kwargs)
|
516 |
-
|
517 |
-
|
518 |
-
func_dict = {
|
519 |
-
'resnet18': (resnet18, 512),
|
520 |
-
'resnet50': (resnet50, 2048)
|
521 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AIFILMS/generate_human_motion/pyrender/docs/make.bat
DELETED
@@ -1,35 +0,0 @@
|
|
1 |
-
@ECHO OFF
|
2 |
-
|
3 |
-
pushd %~dp0
|
4 |
-
|
5 |
-
REM Command file for Sphinx documentation
|
6 |
-
|
7 |
-
if "%SPHINXBUILD%" == "" (
|
8 |
-
set SPHINXBUILD=sphinx-build
|
9 |
-
)
|
10 |
-
set SOURCEDIR=source
|
11 |
-
set BUILDDIR=build
|
12 |
-
|
13 |
-
if "%1" == "" goto help
|
14 |
-
|
15 |
-
%SPHINXBUILD% >NUL 2>NUL
|
16 |
-
if errorlevel 9009 (
|
17 |
-
echo.
|
18 |
-
echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
|
19 |
-
echo.installed, then set the SPHINXBUILD environment variable to point
|
20 |
-
echo.to the full path of the 'sphinx-build' executable. Alternatively you
|
21 |
-
echo.may add the Sphinx directory to PATH.
|
22 |
-
echo.
|
23 |
-
echo.If you don't have Sphinx installed, grab it from
|
24 |
-
echo.http://sphinx-doc.org/
|
25 |
-
exit /b 1
|
26 |
-
)
|
27 |
-
|
28 |
-
%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
29 |
-
goto end
|
30 |
-
|
31 |
-
:help
|
32 |
-
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS%
|
33 |
-
|
34 |
-
:end
|
35 |
-
popd
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AIGText/GlyphControl/scripts/rendertext_tool.py
DELETED
@@ -1,206 +0,0 @@
|
|
1 |
-
from cldm.ddim_hacked import DDIMSampler
|
2 |
-
import torch
|
3 |
-
from annotator.render_images import render_text_image_custom
|
4 |
-
from pytorch_lightning import seed_everything
|
5 |
-
# save_memory = False
|
6 |
-
# from cldm.hack import disable_verbosity
|
7 |
-
# disable_verbosity()
|
8 |
-
import random
|
9 |
-
import einops
|
10 |
-
import numpy as np
|
11 |
-
from ldm.util import instantiate_from_config
|
12 |
-
from cldm.model import load_state_dict
|
13 |
-
from torchvision.transforms import ToTensor
|
14 |
-
from contextlib import nullcontext
|
15 |
-
|
16 |
-
def load_model_from_config(cfg, ckpt, verbose=False, not_use_ckpt=False):
|
17 |
-
|
18 |
-
# if "model_ema.input_blocks10in_layers0weight" not in sd:
|
19 |
-
# print("missing model_ema.input_blocks10in_layers0weight. set use_ema as False")
|
20 |
-
# cfg.model.params.use_ema = False
|
21 |
-
model = instantiate_from_config(cfg.model)
|
22 |
-
|
23 |
-
if ckpt.endswith("model_states.pt"):
|
24 |
-
sd = torch.load(ckpt, map_location='cpu')["module"]
|
25 |
-
else:
|
26 |
-
sd = load_state_dict(ckpt, location='cpu')
|
27 |
-
|
28 |
-
keys_ = list(sd.keys())[:]
|
29 |
-
for k in keys_:
|
30 |
-
if k.startswith("module."):
|
31 |
-
nk = k[7:]
|
32 |
-
sd[nk] = sd[k]
|
33 |
-
del sd[k]
|
34 |
-
|
35 |
-
if not not_use_ckpt:
|
36 |
-
m, u = model.load_state_dict(sd, strict=False)
|
37 |
-
if len(m) > 0 and verbose:
|
38 |
-
print("missing keys: {}".format(len(m)))
|
39 |
-
print(m)
|
40 |
-
if len(u) > 0 and verbose:
|
41 |
-
print("unexpected keys: {}".format(len(u)))
|
42 |
-
print(u)
|
43 |
-
|
44 |
-
if torch.cuda.is_available():
|
45 |
-
model.cuda()
|
46 |
-
model.eval()
|
47 |
-
return model
|
48 |
-
|
49 |
-
def load_model_ckpt(model, ckpt, verbose=True):
|
50 |
-
map_location = "cpu" if not torch.cuda.is_available() else "cuda"
|
51 |
-
print("checkpoint map location:", map_location)
|
52 |
-
if ckpt.endswith("model_states.pt"):
|
53 |
-
sd = torch.load(ckpt, map_location=map_location)["module"]
|
54 |
-
else:
|
55 |
-
sd = load_state_dict(ckpt, location=map_location)
|
56 |
-
|
57 |
-
keys_ = list(sd.keys())[:]
|
58 |
-
for k in keys_:
|
59 |
-
if k.startswith("module."):
|
60 |
-
nk = k[7:]
|
61 |
-
sd[nk] = sd[k]
|
62 |
-
del sd[k]
|
63 |
-
|
64 |
-
m, u = model.load_state_dict(sd, strict=False)
|
65 |
-
if len(m) > 0 and verbose:
|
66 |
-
print("missing keys: {}".format(len(m)))
|
67 |
-
print(m)
|
68 |
-
if len(u) > 0 and verbose:
|
69 |
-
print("unexpected keys: {}".format(len(u)))
|
70 |
-
print(u)
|
71 |
-
model.eval()
|
72 |
-
return model
|
73 |
-
|
74 |
-
class Render_Text:
|
75 |
-
def __init__(self,
|
76 |
-
model,
|
77 |
-
precision_scope=nullcontext,
|
78 |
-
transform=ToTensor(),
|
79 |
-
save_memory = False,
|
80 |
-
):
|
81 |
-
self.model = model
|
82 |
-
self.precision_scope = precision_scope
|
83 |
-
self.transform = transform
|
84 |
-
self.ddim_sampler = DDIMSampler(model)
|
85 |
-
self.save_memory = save_memory
|
86 |
-
|
87 |
-
# process multiple groups of rendered text for building demo
|
88 |
-
def process_multi(self,
|
89 |
-
rendered_txt_values, shared_prompt,
|
90 |
-
width_values, ratio_values,
|
91 |
-
top_left_x_values, top_left_y_values,
|
92 |
-
yaw_values, num_rows_values,
|
93 |
-
shared_num_samples, shared_image_resolution,
|
94 |
-
shared_ddim_steps, shared_guess_mode,
|
95 |
-
shared_strength, shared_scale, shared_seed,
|
96 |
-
shared_eta, shared_a_prompt, shared_n_prompt,
|
97 |
-
only_show_rendered_image=False
|
98 |
-
):
|
99 |
-
if shared_seed == -1:
|
100 |
-
shared_seed = random.randint(0, 65535)
|
101 |
-
seed_everything(shared_seed)
|
102 |
-
with torch.no_grad(), \
|
103 |
-
self.precision_scope("cuda"), \
|
104 |
-
self.model.ema_scope("Sampling on Benchmark Prompts"):
|
105 |
-
print("rendered txt:", str(rendered_txt_values), "[t]")
|
106 |
-
render_none = len([1 for rendered_txt in rendered_txt_values if rendered_txt != ""]) == 0
|
107 |
-
if render_none:
|
108 |
-
# if rendered_txt_values == "":
|
109 |
-
control = None
|
110 |
-
if only_show_rendered_image:
|
111 |
-
return [None]
|
112 |
-
else:
|
113 |
-
def format_bboxes(width_values, ratio_values, top_left_x_values, top_left_y_values, yaw_values):
|
114 |
-
bboxes = []
|
115 |
-
for width, ratio, top_left_x, top_left_y, yaw in zip(width_values, ratio_values, top_left_x_values, top_left_y_values, yaw_values):
|
116 |
-
bbox = {
|
117 |
-
"width": width,
|
118 |
-
"ratio": ratio,
|
119 |
-
# "height": height,
|
120 |
-
"top_left_x": top_left_x,
|
121 |
-
"top_left_y": top_left_y,
|
122 |
-
"yaw": yaw
|
123 |
-
}
|
124 |
-
bboxes.append(bbox)
|
125 |
-
return bboxes
|
126 |
-
|
127 |
-
whiteboard_img = render_text_image_custom(
|
128 |
-
(shared_image_resolution, shared_image_resolution),
|
129 |
-
format_bboxes(width_values, ratio_values, top_left_x_values, top_left_y_values, yaw_values),
|
130 |
-
rendered_txt_values,
|
131 |
-
num_rows_values
|
132 |
-
)
|
133 |
-
whiteboard_img = whiteboard_img.convert("RGB")
|
134 |
-
|
135 |
-
if only_show_rendered_image:
|
136 |
-
return [whiteboard_img]
|
137 |
-
|
138 |
-
control = self.transform(whiteboard_img.copy())
|
139 |
-
if torch.cuda.is_available():
|
140 |
-
control = control.cuda()
|
141 |
-
control = torch.stack([control for _ in range(shared_num_samples)], dim=0)
|
142 |
-
control = control.clone()
|
143 |
-
control = [control]
|
144 |
-
|
145 |
-
H, W = shared_image_resolution, shared_image_resolution
|
146 |
-
|
147 |
-
# if shared_seed == -1:
|
148 |
-
# shared_seed = random.randint(0, 65535)
|
149 |
-
# seed_everything(shared_seed)
|
150 |
-
|
151 |
-
if torch.cuda.is_available() and self.save_memory:
|
152 |
-
print("low_vram_shift: is_diffusing", False)
|
153 |
-
self.model.low_vram_shift(is_diffusing=False)
|
154 |
-
|
155 |
-
print("control is None: {}".format(control is None))
|
156 |
-
if shared_prompt.endswith("."):
|
157 |
-
if shared_a_prompt == "":
|
158 |
-
c_prompt = shared_prompt
|
159 |
-
else:
|
160 |
-
c_prompt = shared_prompt + " " + shared_a_prompt
|
161 |
-
elif shared_prompt.endswith(","):
|
162 |
-
if shared_a_prompt == "":
|
163 |
-
c_prompt = shared_prompt[:-1] + "."
|
164 |
-
else:
|
165 |
-
c_prompt = shared_prompt + " " + shared_a_prompt
|
166 |
-
else:
|
167 |
-
if shared_a_prompt == "":
|
168 |
-
c_prompt = shared_prompt + "."
|
169 |
-
else:
|
170 |
-
c_prompt = shared_prompt + ", " + shared_a_prompt
|
171 |
-
|
172 |
-
# cond_c_cross = self.model.get_learned_conditioning([shared_prompt + ', ' + shared_a_prompt] * shared_num_samples)
|
173 |
-
cond_c_cross = self.model.get_learned_conditioning([c_prompt] * shared_num_samples)
|
174 |
-
print("prompt:", c_prompt)
|
175 |
-
un_cond_cross = self.model.get_learned_conditioning([shared_n_prompt] * shared_num_samples)
|
176 |
-
|
177 |
-
if torch.cuda.is_available() and self.save_memory:
|
178 |
-
print("low_vram_shift: is_diffusing", True)
|
179 |
-
self.model.low_vram_shift(is_diffusing=True)
|
180 |
-
|
181 |
-
cond = {"c_concat": control, "c_crossattn": [cond_c_cross] if not isinstance(cond_c_cross, list) else cond_c_cross}
|
182 |
-
un_cond = {"c_concat": None if shared_guess_mode else control, "c_crossattn": [un_cond_cross] if not isinstance(un_cond_cross, list) else un_cond_cross}
|
183 |
-
shape = (4, H // 8, W // 8)
|
184 |
-
|
185 |
-
if not self.model.learnable_conscale:
|
186 |
-
self.model.control_scales = [shared_strength * (0.825 ** float(12 - i)) for i in range(13)] if shared_guess_mode else ([shared_strength] * 13) # Magic number. IDK why. Perhaps because 0.825**12<0.01 but 0.826**12>0.01
|
187 |
-
else:
|
188 |
-
print("learned control scale: {}".format(str(self.model.control_scales)))
|
189 |
-
samples, intermediates = self.ddim_sampler.sample(shared_ddim_steps, shared_num_samples,
|
190 |
-
shape, cond, verbose=False, eta=shared_eta,
|
191 |
-
unconditional_guidance_scale=shared_scale,
|
192 |
-
unconditional_conditioning=un_cond)
|
193 |
-
if torch.cuda.is_available() and self.save_memory:
|
194 |
-
print("low_vram_shift: is_diffusing", False)
|
195 |
-
self.model.low_vram_shift(is_diffusing=False)
|
196 |
-
|
197 |
-
x_samples = self.model.decode_first_stage(samples)
|
198 |
-
x_samples = (einops.rearrange(x_samples, 'b c h w -> b h w c') * 127.5 + 127.5).cpu().numpy().clip(0, 255).astype(np.uint8)
|
199 |
-
|
200 |
-
results = [x_samples[i] for i in range(shared_num_samples)]
|
201 |
-
# if rendered_txt_values != "":
|
202 |
-
if not render_none:
|
203 |
-
return [whiteboard_img] + results
|
204 |
-
else:
|
205 |
-
return results
|
206 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Abhilashvj/planogram-compliance/utils/plots.py
DELETED
@@ -1,781 +0,0 @@
|
|
1 |
-
# YOLOv5 🚀 by Ultralytics, GPL-3.0 license
|
2 |
-
"""
|
3 |
-
Plotting utils
|
4 |
-
"""
|
5 |
-
|
6 |
-
import contextlib
|
7 |
-
import math
|
8 |
-
import os
|
9 |
-
from copy import copy
|
10 |
-
from pathlib import Path
|
11 |
-
from urllib.error import URLError
|
12 |
-
|
13 |
-
import cv2
|
14 |
-
import matplotlib
|
15 |
-
import matplotlib.pyplot as plt
|
16 |
-
import numpy as np
|
17 |
-
import pandas as pd
|
18 |
-
import seaborn as sn
|
19 |
-
import torch
|
20 |
-
from PIL import Image, ImageDraw, ImageFont
|
21 |
-
|
22 |
-
from utils import TryExcept, threaded
|
23 |
-
from utils.general import (
|
24 |
-
CONFIG_DIR,
|
25 |
-
FONT,
|
26 |
-
LOGGER,
|
27 |
-
check_font,
|
28 |
-
check_requirements,
|
29 |
-
clip_boxes,
|
30 |
-
increment_path,
|
31 |
-
is_ascii,
|
32 |
-
xywh2xyxy,
|
33 |
-
xyxy2xywh,
|
34 |
-
)
|
35 |
-
from utils.metrics import fitness
|
36 |
-
from utils.segment.general import scale_image
|
37 |
-
|
38 |
-
# Settings
|
39 |
-
RANK = int(os.getenv("RANK", -1))
|
40 |
-
matplotlib.rc("font", **{"size": 11})
|
41 |
-
matplotlib.use("Agg") # for writing to files only
|
42 |
-
|
43 |
-
|
44 |
-
class Colors:
|
45 |
-
# Ultralytics color palette https://ultralytics.com/
|
46 |
-
def __init__(self):
|
47 |
-
# hex = matplotlib.colors.TABLEAU_COLORS.values()
|
48 |
-
hexs = (
|
49 |
-
"FF3838",
|
50 |
-
"FF9D97",
|
51 |
-
"FF701F",
|
52 |
-
"FFB21D",
|
53 |
-
"CFD231",
|
54 |
-
"48F90A",
|
55 |
-
"92CC17",
|
56 |
-
"3DDB86",
|
57 |
-
"1A9334",
|
58 |
-
"00D4BB",
|
59 |
-
"2C99A8",
|
60 |
-
"00C2FF",
|
61 |
-
"344593",
|
62 |
-
"6473FF",
|
63 |
-
"0018EC",
|
64 |
-
"8438FF",
|
65 |
-
"520085",
|
66 |
-
"CB38FF",
|
67 |
-
"FF95C8",
|
68 |
-
"FF37C7",
|
69 |
-
)
|
70 |
-
self.palette = [self.hex2rgb(f"#{c}") for c in hexs]
|
71 |
-
self.n = len(self.palette)
|
72 |
-
|
73 |
-
def __call__(self, i, bgr=False):
|
74 |
-
c = self.palette[int(i) % self.n]
|
75 |
-
return (c[2], c[1], c[0]) if bgr else c
|
76 |
-
|
77 |
-
@staticmethod
|
78 |
-
def hex2rgb(h): # rgb order (PIL)
|
79 |
-
return tuple(int(h[1 + i : 1 + i + 2], 16) for i in (0, 2, 4))
|
80 |
-
|
81 |
-
|
82 |
-
colors = Colors() # create instance for 'from utils.plots import colors'
|
83 |
-
|
84 |
-
|
85 |
-
def check_pil_font(font=FONT, size=10):
|
86 |
-
# Return a PIL TrueType Font, downloading to CONFIG_DIR if necessary
|
87 |
-
font = Path(font)
|
88 |
-
font = font if font.exists() else (CONFIG_DIR / font.name)
|
89 |
-
try:
|
90 |
-
return ImageFont.truetype(
|
91 |
-
str(font) if font.exists() else font.name, size
|
92 |
-
)
|
93 |
-
except Exception: # download if missing
|
94 |
-
try:
|
95 |
-
check_font(font)
|
96 |
-
return ImageFont.truetype(str(font), size)
|
97 |
-
except TypeError:
|
98 |
-
check_requirements(
|
99 |
-
"Pillow>=8.4.0"
|
100 |
-
) # known issue https://github.com/ultralytics/yolov5/issues/5374
|
101 |
-
except URLError: # not online
|
102 |
-
return ImageFont.load_default()
|
103 |
-
|
104 |
-
|
105 |
-
class Annotator:
|
106 |
-
# YOLOv5 Annotator for train/val mosaics and jpgs and detect/hub inference annotations
|
107 |
-
def __init__(
|
108 |
-
self,
|
109 |
-
im,
|
110 |
-
line_width=None,
|
111 |
-
font_size=None,
|
112 |
-
font="Arial.ttf",
|
113 |
-
pil=False,
|
114 |
-
example="abc",
|
115 |
-
):
|
116 |
-
assert (
|
117 |
-
im.data.contiguous
|
118 |
-
), "Image not contiguous. Apply np.ascontiguousarray(im) to Annotator() input images."
|
119 |
-
non_ascii = not is_ascii(
|
120 |
-
example
|
121 |
-
) # non-latin labels, i.e. asian, arabic, cyrillic
|
122 |
-
self.pil = pil or non_ascii
|
123 |
-
if self.pil: # use PIL
|
124 |
-
self.im = (
|
125 |
-
im if isinstance(im, Image.Image) else Image.fromarray(im)
|
126 |
-
)
|
127 |
-
self.draw = ImageDraw.Draw(self.im)
|
128 |
-
self.font = check_pil_font(
|
129 |
-
font="Arial.Unicode.ttf" if non_ascii else font,
|
130 |
-
size=font_size
|
131 |
-
or max(round(sum(self.im.size) / 2 * 0.035), 12),
|
132 |
-
)
|
133 |
-
else: # use cv2
|
134 |
-
self.im = im
|
135 |
-
self.lw = line_width or max(
|
136 |
-
round(sum(im.shape) / 2 * 0.003), 2
|
137 |
-
) # line width
|
138 |
-
|
139 |
-
def box_label(
|
140 |
-
self, box, label="", color=(128, 128, 128), txt_color=(255, 255, 255)
|
141 |
-
):
|
142 |
-
# Add one xyxy box to image with label
|
143 |
-
if self.pil or not is_ascii(label):
|
144 |
-
self.draw.rectangle(box, width=self.lw, outline=color) # box
|
145 |
-
if label:
|
146 |
-
w, h = self.font.getsize(label) # text width, height
|
147 |
-
outside = box[1] - h >= 0 # label fits outside box
|
148 |
-
self.draw.rectangle(
|
149 |
-
(
|
150 |
-
box[0],
|
151 |
-
box[1] - h if outside else box[1],
|
152 |
-
box[0] + w + 1,
|
153 |
-
box[1] + 1 if outside else box[1] + h + 1,
|
154 |
-
),
|
155 |
-
fill=color,
|
156 |
-
)
|
157 |
-
# self.draw.text((box[0], box[1]), label, fill=txt_color, font=self.font, anchor='ls') # for PIL>8.0
|
158 |
-
self.draw.text(
|
159 |
-
(box[0], box[1] - h if outside else box[1]),
|
160 |
-
label,
|
161 |
-
fill=txt_color,
|
162 |
-
font=self.font,
|
163 |
-
)
|
164 |
-
else: # cv2
|
165 |
-
p1, p2 = (int(box[0]), int(box[1])), (int(box[2]), int(box[3]))
|
166 |
-
cv2.rectangle(
|
167 |
-
self.im, p1, p2, color, thickness=self.lw, lineType=cv2.LINE_AA
|
168 |
-
)
|
169 |
-
if label:
|
170 |
-
tf = max(self.lw - 1, 1) # font thickness
|
171 |
-
w, h = cv2.getTextSize(
|
172 |
-
label, 0, fontScale=self.lw / 3, thickness=tf
|
173 |
-
)[
|
174 |
-
0
|
175 |
-
] # text width, height
|
176 |
-
outside = p1[1] - h >= 3
|
177 |
-
p2 = p1[0] + w, p1[1] - h - 3 if outside else p1[1] + h + 3
|
178 |
-
cv2.rectangle(
|
179 |
-
self.im, p1, p2, color, -1, cv2.LINE_AA
|
180 |
-
) # filled
|
181 |
-
cv2.putText(
|
182 |
-
self.im,
|
183 |
-
label,
|
184 |
-
(p1[0], p1[1] - 2 if outside else p1[1] + h + 2),
|
185 |
-
0,
|
186 |
-
self.lw / 3,
|
187 |
-
txt_color,
|
188 |
-
thickness=tf,
|
189 |
-
lineType=cv2.LINE_AA,
|
190 |
-
)
|
191 |
-
|
192 |
-
def masks(self, masks, colors, im_gpu, alpha=0.5, retina_masks=False):
|
193 |
-
"""Plot masks at once.
|
194 |
-
Args:
|
195 |
-
masks (tensor): predicted masks on cuda, shape: [n, h, w]
|
196 |
-
colors (List[List[Int]]): colors for predicted masks, [[r, g, b] * n]
|
197 |
-
im_gpu (tensor): img is in cuda, shape: [3, h, w], range: [0, 1]
|
198 |
-
alpha (float): mask transparency: 0.0 fully transparent, 1.0 opaque
|
199 |
-
"""
|
200 |
-
if self.pil:
|
201 |
-
# convert to numpy first
|
202 |
-
self.im = np.asarray(self.im).copy()
|
203 |
-
if len(masks) == 0:
|
204 |
-
self.im[:] = (
|
205 |
-
im_gpu.permute(1, 2, 0).contiguous().cpu().numpy() * 255
|
206 |
-
)
|
207 |
-
colors = (
|
208 |
-
torch.tensor(colors, device=im_gpu.device, dtype=torch.float32)
|
209 |
-
/ 255.0
|
210 |
-
)
|
211 |
-
colors = colors[:, None, None] # shape(n,1,1,3)
|
212 |
-
masks = masks.unsqueeze(3) # shape(n,h,w,1)
|
213 |
-
masks_color = masks * (colors * alpha) # shape(n,h,w,3)
|
214 |
-
|
215 |
-
inv_alph_masks = (1 - masks * alpha).cumprod(0) # shape(n,h,w,1)
|
216 |
-
mcs = (masks_color * inv_alph_masks).sum(
|
217 |
-
0
|
218 |
-
) * 2 # mask color summand shape(n,h,w,3)
|
219 |
-
|
220 |
-
im_gpu = im_gpu.flip(dims=[0]) # flip channel
|
221 |
-
im_gpu = im_gpu.permute(1, 2, 0).contiguous() # shape(h,w,3)
|
222 |
-
im_gpu = im_gpu * inv_alph_masks[-1] + mcs
|
223 |
-
im_mask = (im_gpu * 255).byte().cpu().numpy()
|
224 |
-
self.im[:] = (
|
225 |
-
im_mask
|
226 |
-
if retina_masks
|
227 |
-
else scale_image(im_gpu.shape, im_mask, self.im.shape)
|
228 |
-
)
|
229 |
-
if self.pil:
|
230 |
-
# convert im back to PIL and update draw
|
231 |
-
self.fromarray(self.im)
|
232 |
-
|
233 |
-
def rectangle(self, xy, fill=None, outline=None, width=1):
|
234 |
-
# Add rectangle to image (PIL-only)
|
235 |
-
self.draw.rectangle(xy, fill, outline, width)
|
236 |
-
|
237 |
-
def text(self, xy, text, txt_color=(255, 255, 255), anchor="top"):
|
238 |
-
# Add text to image (PIL-only)
|
239 |
-
if anchor == "bottom": # start y from font bottom
|
240 |
-
w, h = self.font.getsize(text) # text width, height
|
241 |
-
xy[1] += 1 - h
|
242 |
-
self.draw.text(xy, text, fill=txt_color, font=self.font)
|
243 |
-
|
244 |
-
def fromarray(self, im):
|
245 |
-
# Update self.im from a numpy array
|
246 |
-
self.im = im if isinstance(im, Image.Image) else Image.fromarray(im)
|
247 |
-
self.draw = ImageDraw.Draw(self.im)
|
248 |
-
|
249 |
-
def result(self):
|
250 |
-
# Return annotated image as array
|
251 |
-
return np.asarray(self.im)
|
252 |
-
|
253 |
-
|
254 |
-
def feature_visualization(
|
255 |
-
x, module_type, stage, n=32, save_dir=Path("runs/detect/exp")
|
256 |
-
):
|
257 |
-
"""
|
258 |
-
x: Features to be visualized
|
259 |
-
module_type: Module type
|
260 |
-
stage: Module stage within model
|
261 |
-
n: Maximum number of feature maps to plot
|
262 |
-
save_dir: Directory to save results
|
263 |
-
"""
|
264 |
-
if "Detect" not in module_type:
|
265 |
-
(
|
266 |
-
batch,
|
267 |
-
channels,
|
268 |
-
height,
|
269 |
-
width,
|
270 |
-
) = x.shape # batch, channels, height, width
|
271 |
-
if height > 1 and width > 1:
|
272 |
-
f = (
|
273 |
-
save_dir
|
274 |
-
/ f"stage{stage}_{module_type.split('.')[-1]}_features.png"
|
275 |
-
) # filename
|
276 |
-
|
277 |
-
blocks = torch.chunk(
|
278 |
-
x[0].cpu(), channels, dim=0
|
279 |
-
) # select batch index 0, block by channels
|
280 |
-
n = min(n, channels) # number of plots
|
281 |
-
fig, ax = plt.subplots(
|
282 |
-
math.ceil(n / 8), 8, tight_layout=True
|
283 |
-
) # 8 rows x n/8 cols
|
284 |
-
ax = ax.ravel()
|
285 |
-
plt.subplots_adjust(wspace=0.05, hspace=0.05)
|
286 |
-
for i in range(n):
|
287 |
-
ax[i].imshow(blocks[i].squeeze()) # cmap='gray'
|
288 |
-
ax[i].axis("off")
|
289 |
-
|
290 |
-
LOGGER.info(f"Saving {f}... ({n}/{channels})")
|
291 |
-
plt.savefig(f, dpi=300, bbox_inches="tight")
|
292 |
-
plt.close()
|
293 |
-
np.save(str(f.with_suffix(".npy")), x[0].cpu().numpy()) # npy save
|
294 |
-
|
295 |
-
|
296 |
-
def hist2d(x, y, n=100):
|
297 |
-
# 2d histogram used in labels.png and evolve.png
|
298 |
-
xedges, yedges = np.linspace(x.min(), x.max(), n), np.linspace(
|
299 |
-
y.min(), y.max(), n
|
300 |
-
)
|
301 |
-
hist, xedges, yedges = np.histogram2d(x, y, (xedges, yedges))
|
302 |
-
xidx = np.clip(np.digitize(x, xedges) - 1, 0, hist.shape[0] - 1)
|
303 |
-
yidx = np.clip(np.digitize(y, yedges) - 1, 0, hist.shape[1] - 1)
|
304 |
-
return np.log(hist[xidx, yidx])
|
305 |
-
|
306 |
-
|
307 |
-
def butter_lowpass_filtfilt(data, cutoff=1500, fs=50000, order=5):
|
308 |
-
from scipy.signal import butter, filtfilt
|
309 |
-
|
310 |
-
# https://stackoverflow.com/questions/28536191/how-to-filter-smooth-with-scipy-numpy
|
311 |
-
def butter_lowpass(cutoff, fs, order):
|
312 |
-
nyq = 0.5 * fs
|
313 |
-
normal_cutoff = cutoff / nyq
|
314 |
-
return butter(order, normal_cutoff, btype="low", analog=False)
|
315 |
-
|
316 |
-
b, a = butter_lowpass(cutoff, fs, order=order)
|
317 |
-
return filtfilt(b, a, data) # forward-backward filter
|
318 |
-
|
319 |
-
|
320 |
-
def output_to_target(output, max_det=300):
|
321 |
-
# Convert model output to target format [batch_id, class_id, x, y, w, h, conf] for plotting
|
322 |
-
targets = []
|
323 |
-
for i, o in enumerate(output):
|
324 |
-
box, conf, cls = o[:max_det, :6].cpu().split((4, 1, 1), 1)
|
325 |
-
j = torch.full((conf.shape[0], 1), i)
|
326 |
-
targets.append(torch.cat((j, cls, xyxy2xywh(box), conf), 1))
|
327 |
-
return torch.cat(targets, 0).numpy()
|
328 |
-
|
329 |
-
|
330 |
-
@threaded
|
331 |
-
def plot_images(images, targets, paths=None, fname="images.jpg", names=None):
|
332 |
-
# Plot image grid with labels
|
333 |
-
if isinstance(images, torch.Tensor):
|
334 |
-
images = images.cpu().float().numpy()
|
335 |
-
if isinstance(targets, torch.Tensor):
|
336 |
-
targets = targets.cpu().numpy()
|
337 |
-
|
338 |
-
max_size = 1920 # max image size
|
339 |
-
max_subplots = 16 # max image subplots, i.e. 4x4
|
340 |
-
bs, _, h, w = images.shape # batch size, _, height, width
|
341 |
-
bs = min(bs, max_subplots) # limit plot images
|
342 |
-
ns = np.ceil(bs**0.5) # number of subplots (square)
|
343 |
-
if np.max(images[0]) <= 1:
|
344 |
-
images *= 255 # de-normalise (optional)
|
345 |
-
|
346 |
-
# Build Image
|
347 |
-
mosaic = np.full(
|
348 |
-
(int(ns * h), int(ns * w), 3), 255, dtype=np.uint8
|
349 |
-
) # init
|
350 |
-
for i, im in enumerate(images):
|
351 |
-
if i == max_subplots: # if last batch has fewer images than we expect
|
352 |
-
break
|
353 |
-
x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin
|
354 |
-
im = im.transpose(1, 2, 0)
|
355 |
-
mosaic[y : y + h, x : x + w, :] = im
|
356 |
-
|
357 |
-
# Resize (optional)
|
358 |
-
scale = max_size / ns / max(h, w)
|
359 |
-
if scale < 1:
|
360 |
-
h = math.ceil(scale * h)
|
361 |
-
w = math.ceil(scale * w)
|
362 |
-
mosaic = cv2.resize(mosaic, tuple(int(x * ns) for x in (w, h)))
|
363 |
-
|
364 |
-
# Annotate
|
365 |
-
fs = int((h + w) * ns * 0.01) # font size
|
366 |
-
annotator = Annotator(
|
367 |
-
mosaic,
|
368 |
-
line_width=round(fs / 10),
|
369 |
-
font_size=fs,
|
370 |
-
pil=True,
|
371 |
-
example=names,
|
372 |
-
)
|
373 |
-
for i in range(i + 1):
|
374 |
-
x, y = int(w * (i // ns)), int(h * (i % ns)) # block origin
|
375 |
-
annotator.rectangle(
|
376 |
-
[x, y, x + w, y + h], None, (255, 255, 255), width=2
|
377 |
-
) # borders
|
378 |
-
if paths:
|
379 |
-
annotator.text(
|
380 |
-
(x + 5, y + 5),
|
381 |
-
text=Path(paths[i]).name[:40],
|
382 |
-
txt_color=(220, 220, 220),
|
383 |
-
) # filenames
|
384 |
-
if len(targets) > 0:
|
385 |
-
ti = targets[targets[:, 0] == i] # image targets
|
386 |
-
boxes = xywh2xyxy(ti[:, 2:6]).T
|
387 |
-
classes = ti[:, 1].astype("int")
|
388 |
-
labels = ti.shape[1] == 6 # labels if no conf column
|
389 |
-
conf = (
|
390 |
-
None if labels else ti[:, 6]
|
391 |
-
) # check for confidence presence (label vs pred)
|
392 |
-
|
393 |
-
if boxes.shape[1]:
|
394 |
-
if boxes.max() <= 1.01: # if normalized with tolerance 0.01
|
395 |
-
boxes[[0, 2]] *= w # scale to pixels
|
396 |
-
boxes[[1, 3]] *= h
|
397 |
-
elif scale < 1: # absolute coords need scale if image scales
|
398 |
-
boxes *= scale
|
399 |
-
boxes[[0, 2]] += x
|
400 |
-
boxes[[1, 3]] += y
|
401 |
-
for j, box in enumerate(boxes.T.tolist()):
|
402 |
-
cls = classes[j]
|
403 |
-
color = colors(cls)
|
404 |
-
cls = names[cls] if names else cls
|
405 |
-
if labels or conf[j] > 0.25: # 0.25 conf thresh
|
406 |
-
label = f"{cls}" if labels else f"{cls} {conf[j]:.1f}"
|
407 |
-
annotator.box_label(box, label, color=color)
|
408 |
-
annotator.im.save(fname) # save
|
409 |
-
|
410 |
-
|
411 |
-
def plot_lr_scheduler(optimizer, scheduler, epochs=300, save_dir=""):
|
412 |
-
# Plot LR simulating training for full epochs
|
413 |
-
optimizer, scheduler = copy(optimizer), copy(
|
414 |
-
scheduler
|
415 |
-
) # do not modify originals
|
416 |
-
y = []
|
417 |
-
for _ in range(epochs):
|
418 |
-
scheduler.step()
|
419 |
-
y.append(optimizer.param_groups[0]["lr"])
|
420 |
-
plt.plot(y, ".-", label="LR")
|
421 |
-
plt.xlabel("epoch")
|
422 |
-
plt.ylabel("LR")
|
423 |
-
plt.grid()
|
424 |
-
plt.xlim(0, epochs)
|
425 |
-
plt.ylim(0)
|
426 |
-
plt.savefig(Path(save_dir) / "LR.png", dpi=200)
|
427 |
-
plt.close()
|
428 |
-
|
429 |
-
|
430 |
-
def plot_val_txt(): # from utils.plots import *; plot_val()
|
431 |
-
# Plot val.txt histograms
|
432 |
-
x = np.loadtxt("val.txt", dtype=np.float32)
|
433 |
-
box = xyxy2xywh(x[:, :4])
|
434 |
-
cx, cy = box[:, 0], box[:, 1]
|
435 |
-
|
436 |
-
fig, ax = plt.subplots(1, 1, figsize=(6, 6), tight_layout=True)
|
437 |
-
ax.hist2d(cx, cy, bins=600, cmax=10, cmin=0)
|
438 |
-
ax.set_aspect("equal")
|
439 |
-
plt.savefig("hist2d.png", dpi=300)
|
440 |
-
|
441 |
-
fig, ax = plt.subplots(1, 2, figsize=(12, 6), tight_layout=True)
|
442 |
-
ax[0].hist(cx, bins=600)
|
443 |
-
ax[1].hist(cy, bins=600)
|
444 |
-
plt.savefig("hist1d.png", dpi=200)
|
445 |
-
|
446 |
-
|
447 |
-
def plot_targets_txt(): # from utils.plots import *; plot_targets_txt()
|
448 |
-
# Plot targets.txt histograms
|
449 |
-
x = np.loadtxt("targets.txt", dtype=np.float32).T
|
450 |
-
s = ["x targets", "y targets", "width targets", "height targets"]
|
451 |
-
fig, ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)
|
452 |
-
ax = ax.ravel()
|
453 |
-
for i in range(4):
|
454 |
-
ax[i].hist(
|
455 |
-
x[i], bins=100, label=f"{x[i].mean():.3g} +/- {x[i].std():.3g}"
|
456 |
-
)
|
457 |
-
ax[i].legend()
|
458 |
-
ax[i].set_title(s[i])
|
459 |
-
plt.savefig("targets.jpg", dpi=200)
|
460 |
-
|
461 |
-
|
462 |
-
def plot_val_study(
|
463 |
-
file="", dir="", x=None
|
464 |
-
): # from utils.plots import *; plot_val_study()
|
465 |
-
# Plot file=study.txt generated by val.py (or plot all study*.txt in dir)
|
466 |
-
save_dir = Path(file).parent if file else Path(dir)
|
467 |
-
plot2 = False # plot additional results
|
468 |
-
if plot2:
|
469 |
-
ax = plt.subplots(2, 4, figsize=(10, 6), tight_layout=True)[1].ravel()
|
470 |
-
|
471 |
-
fig2, ax2 = plt.subplots(1, 1, figsize=(8, 4), tight_layout=True)
|
472 |
-
# for f in [save_dir / f'study_coco_{x}.txt' for x in ['yolov5n6', 'yolov5s6', 'yolov5m6', 'yolov5l6', 'yolov5x6']]:
|
473 |
-
for f in sorted(save_dir.glob("study*.txt")):
|
474 |
-
y = np.loadtxt(
|
475 |
-
f, dtype=np.float32, usecols=[0, 1, 2, 3, 7, 8, 9], ndmin=2
|
476 |
-
).T
|
477 |
-
x = np.arange(y.shape[1]) if x is None else np.array(x)
|
478 |
-
if plot2:
|
479 |
-
s = [
|
480 |
-
"P",
|
481 |
-
"R",
|
482 |
-
"[email protected]",
|
483 |
-
"[email protected]:.95",
|
484 |
-
"t_preprocess (ms/img)",
|
485 |
-
"t_inference (ms/img)",
|
486 |
-
"t_NMS (ms/img)",
|
487 |
-
]
|
488 |
-
for i in range(7):
|
489 |
-
ax[i].plot(x, y[i], ".-", linewidth=2, markersize=8)
|
490 |
-
ax[i].set_title(s[i])
|
491 |
-
|
492 |
-
j = y[3].argmax() + 1
|
493 |
-
ax2.plot(
|
494 |
-
y[5, 1:j],
|
495 |
-
y[3, 1:j] * 1e2,
|
496 |
-
".-",
|
497 |
-
linewidth=2,
|
498 |
-
markersize=8,
|
499 |
-
label=f.stem.replace("study_coco_", "").replace("yolo", "YOLO"),
|
500 |
-
)
|
501 |
-
|
502 |
-
ax2.plot(
|
503 |
-
1e3 / np.array([209, 140, 97, 58, 35, 18]),
|
504 |
-
[34.6, 40.5, 43.0, 47.5, 49.7, 51.5],
|
505 |
-
"k.-",
|
506 |
-
linewidth=2,
|
507 |
-
markersize=8,
|
508 |
-
alpha=0.25,
|
509 |
-
label="EfficientDet",
|
510 |
-
)
|
511 |
-
|
512 |
-
ax2.grid(alpha=0.2)
|
513 |
-
ax2.set_yticks(np.arange(20, 60, 5))
|
514 |
-
ax2.set_xlim(0, 57)
|
515 |
-
ax2.set_ylim(25, 55)
|
516 |
-
ax2.set_xlabel("GPU Speed (ms/img)")
|
517 |
-
ax2.set_ylabel("COCO AP val")
|
518 |
-
ax2.legend(loc="lower right")
|
519 |
-
f = save_dir / "study.png"
|
520 |
-
print(f"Saving {f}...")
|
521 |
-
plt.savefig(f, dpi=300)
|
522 |
-
|
523 |
-
|
524 |
-
@TryExcept() # known issue https://github.com/ultralytics/yolov5/issues/5395
|
525 |
-
def plot_labels(labels, names=(), save_dir=Path("")):
|
526 |
-
# plot dataset labels
|
527 |
-
LOGGER.info(f"Plotting labels to {save_dir / 'labels.jpg'}... ")
|
528 |
-
c, b = labels[:, 0], labels[:, 1:].transpose() # classes, boxes
|
529 |
-
nc = int(c.max() + 1) # number of classes
|
530 |
-
x = pd.DataFrame(b.transpose(), columns=["x", "y", "width", "height"])
|
531 |
-
|
532 |
-
# seaborn correlogram
|
533 |
-
sn.pairplot(
|
534 |
-
x,
|
535 |
-
corner=True,
|
536 |
-
diag_kind="auto",
|
537 |
-
kind="hist",
|
538 |
-
diag_kws=dict(bins=50),
|
539 |
-
plot_kws=dict(pmax=0.9),
|
540 |
-
)
|
541 |
-
plt.savefig(save_dir / "labels_correlogram.jpg", dpi=200)
|
542 |
-
plt.close()
|
543 |
-
|
544 |
-
# matplotlib labels
|
545 |
-
matplotlib.use("svg") # faster
|
546 |
-
ax = plt.subplots(2, 2, figsize=(8, 8), tight_layout=True)[1].ravel()
|
547 |
-
y = ax[0].hist(c, bins=np.linspace(0, nc, nc + 1) - 0.5, rwidth=0.8)
|
548 |
-
with contextlib.suppress(Exception): # color histogram bars by class
|
549 |
-
[
|
550 |
-
y[2].patches[i].set_color([x / 255 for x in colors(i)])
|
551 |
-
for i in range(nc)
|
552 |
-
] # known issue #3195
|
553 |
-
ax[0].set_ylabel("instances")
|
554 |
-
if 0 < len(names) < 30:
|
555 |
-
ax[0].set_xticks(range(len(names)))
|
556 |
-
ax[0].set_xticklabels(list(names.values()), rotation=90, fontsize=10)
|
557 |
-
else:
|
558 |
-
ax[0].set_xlabel("classes")
|
559 |
-
sn.histplot(x, x="x", y="y", ax=ax[2], bins=50, pmax=0.9)
|
560 |
-
sn.histplot(x, x="width", y="height", ax=ax[3], bins=50, pmax=0.9)
|
561 |
-
|
562 |
-
# rectangles
|
563 |
-
labels[:, 1:3] = 0.5 # center
|
564 |
-
labels[:, 1:] = xywh2xyxy(labels[:, 1:]) * 2000
|
565 |
-
img = Image.fromarray(np.ones((2000, 2000, 3), dtype=np.uint8) * 255)
|
566 |
-
for cls, *box in labels[:1000]:
|
567 |
-
ImageDraw.Draw(img).rectangle(
|
568 |
-
box, width=1, outline=colors(cls)
|
569 |
-
) # plot
|
570 |
-
ax[1].imshow(img)
|
571 |
-
ax[1].axis("off")
|
572 |
-
|
573 |
-
for a in [0, 1, 2, 3]:
|
574 |
-
for s in ["top", "right", "left", "bottom"]:
|
575 |
-
ax[a].spines[s].set_visible(False)
|
576 |
-
|
577 |
-
plt.savefig(save_dir / "labels.jpg", dpi=200)
|
578 |
-
matplotlib.use("Agg")
|
579 |
-
plt.close()
|
580 |
-
|
581 |
-
|
582 |
-
def imshow_cls(
|
583 |
-
im,
|
584 |
-
labels=None,
|
585 |
-
pred=None,
|
586 |
-
names=None,
|
587 |
-
nmax=25,
|
588 |
-
verbose=False,
|
589 |
-
f=Path("images.jpg"),
|
590 |
-
):
|
591 |
-
# Show classification image grid with labels (optional) and predictions (optional)
|
592 |
-
from utils.augmentations import denormalize
|
593 |
-
|
594 |
-
names = names or [f"class{i}" for i in range(1000)]
|
595 |
-
blocks = torch.chunk(
|
596 |
-
denormalize(im.clone()).cpu().float(), len(im), dim=0
|
597 |
-
) # select batch index 0, block by channels
|
598 |
-
n = min(len(blocks), nmax) # number of plots
|
599 |
-
m = min(8, round(n**0.5)) # 8 x 8 default
|
600 |
-
fig, ax = plt.subplots(math.ceil(n / m), m) # 8 rows x n/8 cols
|
601 |
-
ax = ax.ravel() if m > 1 else [ax]
|
602 |
-
# plt.subplots_adjust(wspace=0.05, hspace=0.05)
|
603 |
-
for i in range(n):
|
604 |
-
ax[i].imshow(
|
605 |
-
blocks[i].squeeze().permute((1, 2, 0)).numpy().clip(0.0, 1.0)
|
606 |
-
)
|
607 |
-
ax[i].axis("off")
|
608 |
-
if labels is not None:
|
609 |
-
s = names[labels[i]] + (
|
610 |
-
f"—{names[pred[i]]}" if pred is not None else ""
|
611 |
-
)
|
612 |
-
ax[i].set_title(s, fontsize=8, verticalalignment="top")
|
613 |
-
plt.savefig(f, dpi=300, bbox_inches="tight")
|
614 |
-
plt.close()
|
615 |
-
if verbose:
|
616 |
-
LOGGER.info(f"Saving {f}")
|
617 |
-
if labels is not None:
|
618 |
-
LOGGER.info(
|
619 |
-
"True: "
|
620 |
-
+ " ".join(f"{names[i]:3s}" for i in labels[:nmax])
|
621 |
-
)
|
622 |
-
if pred is not None:
|
623 |
-
LOGGER.info(
|
624 |
-
"Predicted:" + " ".join(f"{names[i]:3s}" for i in pred[:nmax])
|
625 |
-
)
|
626 |
-
return f
|
627 |
-
|
628 |
-
|
629 |
-
def plot_evolve(
|
630 |
-
evolve_csv="path/to/evolve.csv",
|
631 |
-
): # from utils.plots import *; plot_evolve()
|
632 |
-
# Plot evolve.csv hyp evolution results
|
633 |
-
evolve_csv = Path(evolve_csv)
|
634 |
-
data = pd.read_csv(evolve_csv)
|
635 |
-
keys = [x.strip() for x in data.columns]
|
636 |
-
x = data.values
|
637 |
-
f = fitness(x)
|
638 |
-
j = np.argmax(f) # max fitness index
|
639 |
-
plt.figure(figsize=(10, 12), tight_layout=True)
|
640 |
-
matplotlib.rc("font", **{"size": 8})
|
641 |
-
print(f"Best results from row {j} of {evolve_csv}:")
|
642 |
-
for i, k in enumerate(keys[7:]):
|
643 |
-
v = x[:, 7 + i]
|
644 |
-
mu = v[j] # best single result
|
645 |
-
plt.subplot(6, 5, i + 1)
|
646 |
-
plt.scatter(
|
647 |
-
v,
|
648 |
-
f,
|
649 |
-
c=hist2d(v, f, 20),
|
650 |
-
cmap="viridis",
|
651 |
-
alpha=0.8,
|
652 |
-
edgecolors="none",
|
653 |
-
)
|
654 |
-
plt.plot(mu, f.max(), "k+", markersize=15)
|
655 |
-
plt.title(
|
656 |
-
f"{k} = {mu:.3g}", fontdict={"size": 9}
|
657 |
-
) # limit to 40 characters
|
658 |
-
if i % 5 != 0:
|
659 |
-
plt.yticks([])
|
660 |
-
print(f"{k:>15}: {mu:.3g}")
|
661 |
-
f = evolve_csv.with_suffix(".png") # filename
|
662 |
-
plt.savefig(f, dpi=200)
|
663 |
-
plt.close()
|
664 |
-
print(f"Saved {f}")
|
665 |
-
|
666 |
-
|
667 |
-
def plot_results(file="path/to/results.csv", dir=""):
|
668 |
-
# Plot training results.csv. Usage: from utils.plots import *; plot_results('path/to/results.csv')
|
669 |
-
save_dir = Path(file).parent if file else Path(dir)
|
670 |
-
fig, ax = plt.subplots(2, 5, figsize=(12, 6), tight_layout=True)
|
671 |
-
ax = ax.ravel()
|
672 |
-
files = list(save_dir.glob("results*.csv"))
|
673 |
-
assert len(
|
674 |
-
files
|
675 |
-
), f"No results.csv files found in {save_dir.resolve()}, nothing to plot."
|
676 |
-
for f in files:
|
677 |
-
try:
|
678 |
-
data = pd.read_csv(f)
|
679 |
-
s = [x.strip() for x in data.columns]
|
680 |
-
x = data.values[:, 0]
|
681 |
-
for i, j in enumerate([1, 2, 3, 4, 5, 8, 9, 10, 6, 7]):
|
682 |
-
y = data.values[:, j].astype("float")
|
683 |
-
# y[y == 0] = np.nan # don't show zero values
|
684 |
-
ax[i].plot(
|
685 |
-
x, y, marker=".", label=f.stem, linewidth=2, markersize=8
|
686 |
-
)
|
687 |
-
ax[i].set_title(s[j], fontsize=12)
|
688 |
-
# if j in [8, 9, 10]: # share train and val loss y axes
|
689 |
-
# ax[i].get_shared_y_axes().join(ax[i], ax[i - 5])
|
690 |
-
except Exception as e:
|
691 |
-
LOGGER.info(f"Warning: Plotting error for {f}: {e}")
|
692 |
-
ax[1].legend()
|
693 |
-
fig.savefig(save_dir / "results.png", dpi=200)
|
694 |
-
plt.close()
|
695 |
-
|
696 |
-
|
697 |
-
def profile_idetection(start=0, stop=0, labels=(), save_dir=""):
|
698 |
-
# Plot iDetection '*.txt' per-image logs. from utils.plots import *; profile_idetection()
|
699 |
-
ax = plt.subplots(2, 4, figsize=(12, 6), tight_layout=True)[1].ravel()
|
700 |
-
s = [
|
701 |
-
"Images",
|
702 |
-
"Free Storage (GB)",
|
703 |
-
"RAM Usage (GB)",
|
704 |
-
"Battery",
|
705 |
-
"dt_raw (ms)",
|
706 |
-
"dt_smooth (ms)",
|
707 |
-
"real-world FPS",
|
708 |
-
]
|
709 |
-
files = list(Path(save_dir).glob("frames*.txt"))
|
710 |
-
for fi, f in enumerate(files):
|
711 |
-
try:
|
712 |
-
results = np.loadtxt(f, ndmin=2).T[
|
713 |
-
:, 90:-30
|
714 |
-
] # clip first and last rows
|
715 |
-
n = results.shape[1] # number of rows
|
716 |
-
x = np.arange(start, min(stop, n) if stop else n)
|
717 |
-
results = results[:, x]
|
718 |
-
t = results[0] - results[0].min() # set t0=0s
|
719 |
-
results[0] = x
|
720 |
-
for i, a in enumerate(ax):
|
721 |
-
if i < len(results):
|
722 |
-
label = (
|
723 |
-
labels[fi]
|
724 |
-
if len(labels)
|
725 |
-
else f.stem.replace("frames_", "")
|
726 |
-
)
|
727 |
-
a.plot(
|
728 |
-
t,
|
729 |
-
results[i],
|
730 |
-
marker=".",
|
731 |
-
label=label,
|
732 |
-
linewidth=1,
|
733 |
-
markersize=5,
|
734 |
-
)
|
735 |
-
a.set_title(s[i])
|
736 |
-
a.set_xlabel("time (s)")
|
737 |
-
# if fi == len(files) - 1:
|
738 |
-
# a.set_ylim(bottom=0)
|
739 |
-
for side in ["top", "right"]:
|
740 |
-
a.spines[side].set_visible(False)
|
741 |
-
else:
|
742 |
-
a.remove()
|
743 |
-
except Exception as e:
|
744 |
-
print(f"Warning: Plotting error for {f}; {e}")
|
745 |
-
ax[1].legend()
|
746 |
-
plt.savefig(Path(save_dir) / "idetection_profile.png", dpi=200)
|
747 |
-
|
748 |
-
|
749 |
-
def save_one_box(
|
750 |
-
xyxy,
|
751 |
-
im,
|
752 |
-
file=Path("im.jpg"),
|
753 |
-
gain=1.02,
|
754 |
-
pad=10,
|
755 |
-
square=False,
|
756 |
-
BGR=False,
|
757 |
-
save=True,
|
758 |
-
):
|
759 |
-
# Save image crop as {file} with crop size multiple {gain} and {pad} pixels. Save and/or return crop
|
760 |
-
xyxy = torch.tensor(xyxy).view(-1, 4)
|
761 |
-
b = xyxy2xywh(xyxy) # boxes
|
762 |
-
if square:
|
763 |
-
b[:, 2:] = (
|
764 |
-
b[:, 2:].max(1)[0].unsqueeze(1)
|
765 |
-
) # attempt rectangle to square
|
766 |
-
b[:, 2:] = b[:, 2:] * gain + pad # box wh * gain + pad
|
767 |
-
xyxy = xywh2xyxy(b).long()
|
768 |
-
clip_boxes(xyxy, im.shape)
|
769 |
-
crop = im[
|
770 |
-
int(xyxy[0, 1]) : int(xyxy[0, 3]),
|
771 |
-
int(xyxy[0, 0]) : int(xyxy[0, 2]),
|
772 |
-
:: (1 if BGR else -1),
|
773 |
-
]
|
774 |
-
if save:
|
775 |
-
file.parent.mkdir(parents=True, exist_ok=True) # make directory
|
776 |
-
f = str(increment_path(file).with_suffix(".jpg"))
|
777 |
-
# cv2.imwrite(f, crop) # save BGR, https://github.com/ultralytics/yolov5/issues/7007 chroma subsampling issue
|
778 |
-
Image.fromarray(crop[..., ::-1]).save(
|
779 |
-
f, quality=95, subsampling=0
|
780 |
-
) # save RGB
|
781 |
-
return crop
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AchyuthGamer/Free-Accounts-Generator/README.md
DELETED
@@ -1,11 +0,0 @@
|
|
1 |
-
---
|
2 |
-
title: Free Accounts Generator
|
3 |
-
emoji: 🏢
|
4 |
-
colorFrom: blue
|
5 |
-
colorTo: purple
|
6 |
-
sdk: static
|
7 |
-
pinned: false
|
8 |
-
license: mit
|
9 |
-
---
|
10 |
-
|
11 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/plugins/scale.d.ts
DELETED
@@ -1,2 +0,0 @@
|
|
1 |
-
import Scale from './behaviors/scale/Scale';
|
2 |
-
export default Scale;
|
|
|
|
|
|
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/spinner/pie/Pie.d.ts
DELETED
@@ -1,2 +0,0 @@
|
|
1 |
-
import Base from '../base/Base';
|
2 |
-
export default class Pie extends Base { }
|
|
|
|
|
|
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/basesizer/utils/CheckSize.js
DELETED
@@ -1,12 +0,0 @@
|
|
1 |
-
var CheckSize = function (child, parent) {
|
2 |
-
if (child.width < child.childrenWidth) {
|
3 |
-
// Warning
|
4 |
-
console.warn(`Layout width error: Parent=${parent.constructor.name}, Child=${child.constructor.name}`);
|
5 |
-
}
|
6 |
-
if (child.height < child.childrenHeight) {
|
7 |
-
// Warning
|
8 |
-
console.warn(`Layout height error: Parent=${parent.constructor.name}, Child=${child.constructor.name}`);
|
9 |
-
}
|
10 |
-
}
|
11 |
-
|
12 |
-
export default CheckSize;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/imagebox/Factory.d.ts
DELETED
@@ -1,7 +0,0 @@
|
|
1 |
-
import ImageBox from './ImageBox';
|
2 |
-
|
3 |
-
export default function (
|
4 |
-
x?: number, y?: number,
|
5 |
-
texture?: string, frame?: string,
|
6 |
-
config?: ImageBox.IConfig
|
7 |
-
): ImageBox;
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AgentVerse/agentVerse/ui/src/phaser3-rex-plugins/templates/ui/perspectivecard/Factory.d.ts
DELETED
@@ -1,5 +0,0 @@
|
|
1 |
-
import PerspectiveCard from './PerspectiveCard';
|
2 |
-
|
3 |
-
export default function (
|
4 |
-
config?: PerspectiveCard.IConfig
|
5 |
-
): PerspectiveCard;
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Aki004/herta-so-vits/hubert/__init__.py
DELETED
File without changes
|
spaces/Alcedo/yunmedia/README.md
DELETED
@@ -1,10 +0,0 @@
|
|
1 |
-
---
|
2 |
-
title: Media
|
3 |
-
emoji: 💫
|
4 |
-
colorFrom: green
|
5 |
-
colorTo: pink
|
6 |
-
sdk: docker
|
7 |
-
pinned: false
|
8 |
-
license: mit
|
9 |
-
app_port: 3000
|
10 |
-
---
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Allakhazam/anythingV4/app.py
DELETED
@@ -1,26 +0,0 @@
|
|
1 |
-
import gradio
|
2 |
-
|
3 |
-
model_interfaces = gradio.Interface.load("models/ckpt/anything-v4.0")
|
4 |
-
|
5 |
-
def process_prompt(prompt):
|
6 |
-
prompt=prompt.lower()
|
7 |
-
print(prompt)
|
8 |
-
image = model_interfaces(prompt)
|
9 |
-
return image
|
10 |
-
|
11 |
-
sandbox = gradio.Interface(
|
12 |
-
fn=process_prompt,
|
13 |
-
inputs=[gradio.Textbox(label="Enter Prompt:")],
|
14 |
-
outputs=[gradio.Image(label="Produced Image")],
|
15 |
-
title="Text to Image",
|
16 |
-
examples=[["Female Adventurer portrait, rogue, tavern background"],
|
17 |
-
["female Adventurer portrait, barbarian, tavern background"],
|
18 |
-
["Magic Adventurer portrait, old wizard, tavern background"],
|
19 |
-
["Male superhero portrait, modern city, building background"],
|
20 |
-
["Magic Adventurer portrait, old wizard, fire elementalist, tavern background, fire"],
|
21 |
-
["Female Adventurer portrait, Druid, tavern background"],
|
22 |
-
["close up portrait Benedict Cumberbatch wizard of black magic, robe with hood, Hogwart University, castle tower background, oil painting on canvas"],
|
23 |
-
["Adventurer portrait, cleric, rogue looking stranger, tavern background"]]
|
24 |
-
)
|
25 |
-
|
26 |
-
sandbox.queue(concurrency_count=10).launch(debug=True)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/docs/source/ko/using-diffusers/reusing_seeds.md
DELETED
@@ -1,63 +0,0 @@
|
|
1 |
-
<!--Copyright 2023 The HuggingFace Team. All rights reserved.
|
2 |
-
|
3 |
-
Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
|
4 |
-
the License. You may obtain a copy of the License at
|
5 |
-
|
6 |
-
http://www.apache.org/licenses/LICENSE-2.0
|
7 |
-
|
8 |
-
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
|
9 |
-
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
|
10 |
-
specific language governing permissions and limitations under the License.
|
11 |
-
-->
|
12 |
-
|
13 |
-
# Deterministic(결정적) 생성을 통한 이미지 품질 개선
|
14 |
-
|
15 |
-
생성된 이미지의 품질을 개선하는 일반적인 방법은 *결정적 batch(배치) 생성*을 사용하는 것입니다. 이 방법은 이미지 batch(배치)를 생성하고 두 번째 추론 라운드에서 더 자세한 프롬프트와 함께 개선할 이미지 하나를 선택하는 것입니다. 핵심은 일괄 이미지 생성을 위해 파이프라인에 [`torch.Generator`](https://pytorch.org/docs/stable/generated/torch.Generator.html#generator) 목록을 전달하고, 각 `Generator`를 시드에 연결하여 이미지에 재사용할 수 있도록 하는 것입니다.
|
16 |
-
|
17 |
-
예를 들어 [`runwayml/stable-diffusion-v1-5`](runwayml/stable-diffusion-v1-5)를 사용하여 다음 프롬프트의 여러 버전을 생성해 봅시다.
|
18 |
-
|
19 |
-
```py
|
20 |
-
prompt = "Labrador in the style of Vermeer"
|
21 |
-
```
|
22 |
-
|
23 |
-
(가능하다면) 파이프라인을 [`DiffusionPipeline.from_pretrained`]로 인스턴스화하여 GPU에 배치합니다.
|
24 |
-
|
25 |
-
```python
|
26 |
-
>>> from diffusers import DiffusionPipeline
|
27 |
-
|
28 |
-
>>> pipe = DiffusionPipeline.from_pretrained("runwayml/stable-diffusion-v1-5", torch_dtype=torch.float16)
|
29 |
-
>>> pipe = pipe.to("cuda")
|
30 |
-
```
|
31 |
-
|
32 |
-
이제 네 개의 서로 다른 `Generator`를 정의하고 각 `Generator`에 시드(`0` ~ `3`)를 할당하여 나중에 특정 이미지에 대해 `Generator`를 재사용할 수 있도록 합니다.
|
33 |
-
|
34 |
-
```python
|
35 |
-
>>> import torch
|
36 |
-
|
37 |
-
>>> generator = [torch.Generator(device="cuda").manual_seed(i) for i in range(4)]
|
38 |
-
```
|
39 |
-
|
40 |
-
이미지를 생성하고 살펴봅니다.
|
41 |
-
|
42 |
-
```python
|
43 |
-
>>> images = pipe(prompt, generator=generator, num_images_per_prompt=4).images
|
44 |
-
>>> images
|
45 |
-
```
|
46 |
-
|
47 |
-

|
48 |
-
|
49 |
-
이 예제에서는 첫 번째 이미지를 개선했지만 실제로는 원하는 모든 이미지를 사용할 수 있습니다(심지어 두 개의 눈이 있는 이미지도!). 첫 번째 이미지에서는 시드가 '0'인 '생성기'를 사용했기 때문에 두 번째 추론 라운드에서는 이 '생성기'를 재사용할 것입니다. 이미지의 품질을 개선하려면 프롬프트에 몇 가지 텍스트를 추가합니다:
|
50 |
-
|
51 |
-
```python
|
52 |
-
prompt = [prompt + t for t in [", highly realistic", ", artsy", ", trending", ", colorful"]]
|
53 |
-
generator = [torch.Generator(device="cuda").manual_seed(0) for i in range(4)]
|
54 |
-
```
|
55 |
-
|
56 |
-
시드가 `0`인 제너레이터 4개를 생성하고, 이전 라운드의 첫 번째 이미지처럼 보이는 다른 이미지 batch(배치)를 생성합니다!
|
57 |
-
|
58 |
-
```python
|
59 |
-
>>> images = pipe(prompt, generator=generator).images
|
60 |
-
>>> images
|
61 |
-
```
|
62 |
-
|
63 |
-

|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/examples/community/interpolate_stable_diffusion.py
DELETED
@@ -1,524 +0,0 @@
|
|
1 |
-
import inspect
|
2 |
-
import time
|
3 |
-
from pathlib import Path
|
4 |
-
from typing import Callable, List, Optional, Union
|
5 |
-
|
6 |
-
import numpy as np
|
7 |
-
import torch
|
8 |
-
from transformers import CLIPImageProcessor, CLIPTextModel, CLIPTokenizer
|
9 |
-
|
10 |
-
from diffusers import DiffusionPipeline
|
11 |
-
from diffusers.configuration_utils import FrozenDict
|
12 |
-
from diffusers.models import AutoencoderKL, UNet2DConditionModel
|
13 |
-
from diffusers.pipelines.stable_diffusion import StableDiffusionPipelineOutput
|
14 |
-
from diffusers.pipelines.stable_diffusion.safety_checker import StableDiffusionSafetyChecker
|
15 |
-
from diffusers.schedulers import DDIMScheduler, LMSDiscreteScheduler, PNDMScheduler
|
16 |
-
from diffusers.utils import deprecate, logging
|
17 |
-
|
18 |
-
|
19 |
-
logger = logging.get_logger(__name__) # pylint: disable=invalid-name
|
20 |
-
|
21 |
-
|
22 |
-
def slerp(t, v0, v1, DOT_THRESHOLD=0.9995):
|
23 |
-
"""helper function to spherically interpolate two arrays v1 v2"""
|
24 |
-
|
25 |
-
if not isinstance(v0, np.ndarray):
|
26 |
-
inputs_are_torch = True
|
27 |
-
input_device = v0.device
|
28 |
-
v0 = v0.cpu().numpy()
|
29 |
-
v1 = v1.cpu().numpy()
|
30 |
-
|
31 |
-
dot = np.sum(v0 * v1 / (np.linalg.norm(v0) * np.linalg.norm(v1)))
|
32 |
-
if np.abs(dot) > DOT_THRESHOLD:
|
33 |
-
v2 = (1 - t) * v0 + t * v1
|
34 |
-
else:
|
35 |
-
theta_0 = np.arccos(dot)
|
36 |
-
sin_theta_0 = np.sin(theta_0)
|
37 |
-
theta_t = theta_0 * t
|
38 |
-
sin_theta_t = np.sin(theta_t)
|
39 |
-
s0 = np.sin(theta_0 - theta_t) / sin_theta_0
|
40 |
-
s1 = sin_theta_t / sin_theta_0
|
41 |
-
v2 = s0 * v0 + s1 * v1
|
42 |
-
|
43 |
-
if inputs_are_torch:
|
44 |
-
v2 = torch.from_numpy(v2).to(input_device)
|
45 |
-
|
46 |
-
return v2
|
47 |
-
|
48 |
-
|
49 |
-
class StableDiffusionWalkPipeline(DiffusionPipeline):
|
50 |
-
r"""
|
51 |
-
Pipeline for text-to-image generation using Stable Diffusion.
|
52 |
-
|
53 |
-
This model inherits from [`DiffusionPipeline`]. Check the superclass documentation for the generic methods the
|
54 |
-
library implements for all the pipelines (such as downloading or saving, running on a particular device, etc.)
|
55 |
-
|
56 |
-
Args:
|
57 |
-
vae ([`AutoencoderKL`]):
|
58 |
-
Variational Auto-Encoder (VAE) Model to encode and decode images to and from latent representations.
|
59 |
-
text_encoder ([`CLIPTextModel`]):
|
60 |
-
Frozen text-encoder. Stable Diffusion uses the text portion of
|
61 |
-
[CLIP](https://huggingface.co/docs/transformers/model_doc/clip#transformers.CLIPTextModel), specifically
|
62 |
-
the [clip-vit-large-patch14](https://huggingface.co/openai/clip-vit-large-patch14) variant.
|
63 |
-
tokenizer (`CLIPTokenizer`):
|
64 |
-
Tokenizer of class
|
65 |
-
[CLIPTokenizer](https://huggingface.co/docs/transformers/v4.21.0/en/model_doc/clip#transformers.CLIPTokenizer).
|
66 |
-
unet ([`UNet2DConditionModel`]): Conditional U-Net architecture to denoise the encoded image latents.
|
67 |
-
scheduler ([`SchedulerMixin`]):
|
68 |
-
A scheduler to be used in combination with `unet` to denoise the encoded image latents. Can be one of
|
69 |
-
[`DDIMScheduler`], [`LMSDiscreteScheduler`], or [`PNDMScheduler`].
|
70 |
-
safety_checker ([`StableDiffusionSafetyChecker`]):
|
71 |
-
Classification module that estimates whether generated images could be considered offensive or harmful.
|
72 |
-
Please, refer to the [model card](https://huggingface.co/CompVis/stable-diffusion-v1-4) for details.
|
73 |
-
feature_extractor ([`CLIPImageProcessor`]):
|
74 |
-
Model that extracts features from generated images to be used as inputs for the `safety_checker`.
|
75 |
-
"""
|
76 |
-
|
77 |
-
def __init__(
|
78 |
-
self,
|
79 |
-
vae: AutoencoderKL,
|
80 |
-
text_encoder: CLIPTextModel,
|
81 |
-
tokenizer: CLIPTokenizer,
|
82 |
-
unet: UNet2DConditionModel,
|
83 |
-
scheduler: Union[DDIMScheduler, PNDMScheduler, LMSDiscreteScheduler],
|
84 |
-
safety_checker: StableDiffusionSafetyChecker,
|
85 |
-
feature_extractor: CLIPImageProcessor,
|
86 |
-
):
|
87 |
-
super().__init__()
|
88 |
-
|
89 |
-
if hasattr(scheduler.config, "steps_offset") and scheduler.config.steps_offset != 1:
|
90 |
-
deprecation_message = (
|
91 |
-
f"The configuration file of this scheduler: {scheduler} is outdated. `steps_offset`"
|
92 |
-
f" should be set to 1 instead of {scheduler.config.steps_offset}. Please make sure "
|
93 |
-
"to update the config accordingly as leaving `steps_offset` might led to incorrect results"
|
94 |
-
" in future versions. If you have downloaded this checkpoint from the Hugging Face Hub,"
|
95 |
-
" it would be very nice if you could open a Pull request for the `scheduler/scheduler_config.json`"
|
96 |
-
" file"
|
97 |
-
)
|
98 |
-
deprecate("steps_offset!=1", "1.0.0", deprecation_message, standard_warn=False)
|
99 |
-
new_config = dict(scheduler.config)
|
100 |
-
new_config["steps_offset"] = 1
|
101 |
-
scheduler._internal_dict = FrozenDict(new_config)
|
102 |
-
|
103 |
-
if safety_checker is None:
|
104 |
-
logger.warning(
|
105 |
-
f"You have disabled the safety checker for {self.__class__} by passing `safety_checker=None`. Ensure"
|
106 |
-
" that you abide to the conditions of the Stable Diffusion license and do not expose unfiltered"
|
107 |
-
" results in services or applications open to the public. Both the diffusers team and Hugging Face"
|
108 |
-
" strongly recommend to keep the safety filter enabled in all public facing circumstances, disabling"
|
109 |
-
" it only for use-cases that involve analyzing network behavior or auditing its results. For more"
|
110 |
-
" information, please have a look at https://github.com/huggingface/diffusers/pull/254 ."
|
111 |
-
)
|
112 |
-
|
113 |
-
self.register_modules(
|
114 |
-
vae=vae,
|
115 |
-
text_encoder=text_encoder,
|
116 |
-
tokenizer=tokenizer,
|
117 |
-
unet=unet,
|
118 |
-
scheduler=scheduler,
|
119 |
-
safety_checker=safety_checker,
|
120 |
-
feature_extractor=feature_extractor,
|
121 |
-
)
|
122 |
-
|
123 |
-
def enable_attention_slicing(self, slice_size: Optional[Union[str, int]] = "auto"):
|
124 |
-
r"""
|
125 |
-
Enable sliced attention computation.
|
126 |
-
|
127 |
-
When this option is enabled, the attention module will split the input tensor in slices, to compute attention
|
128 |
-
in several steps. This is useful to save some memory in exchange for a small speed decrease.
|
129 |
-
|
130 |
-
Args:
|
131 |
-
slice_size (`str` or `int`, *optional*, defaults to `"auto"`):
|
132 |
-
When `"auto"`, halves the input to the attention heads, so attention will be computed in two steps. If
|
133 |
-
a number is provided, uses as many slices as `attention_head_dim // slice_size`. In this case,
|
134 |
-
`attention_head_dim` must be a multiple of `slice_size`.
|
135 |
-
"""
|
136 |
-
if slice_size == "auto":
|
137 |
-
# half the attention head size is usually a good trade-off between
|
138 |
-
# speed and memory
|
139 |
-
slice_size = self.unet.config.attention_head_dim // 2
|
140 |
-
self.unet.set_attention_slice(slice_size)
|
141 |
-
|
142 |
-
def disable_attention_slicing(self):
|
143 |
-
r"""
|
144 |
-
Disable sliced attention computation. If `enable_attention_slicing` was previously invoked, this method will go
|
145 |
-
back to computing attention in one step.
|
146 |
-
"""
|
147 |
-
# set slice_size = `None` to disable `attention slicing`
|
148 |
-
self.enable_attention_slicing(None)
|
149 |
-
|
150 |
-
@torch.no_grad()
|
151 |
-
def __call__(
|
152 |
-
self,
|
153 |
-
prompt: Optional[Union[str, List[str]]] = None,
|
154 |
-
height: int = 512,
|
155 |
-
width: int = 512,
|
156 |
-
num_inference_steps: int = 50,
|
157 |
-
guidance_scale: float = 7.5,
|
158 |
-
negative_prompt: Optional[Union[str, List[str]]] = None,
|
159 |
-
num_images_per_prompt: Optional[int] = 1,
|
160 |
-
eta: float = 0.0,
|
161 |
-
generator: Optional[torch.Generator] = None,
|
162 |
-
latents: Optional[torch.FloatTensor] = None,
|
163 |
-
output_type: Optional[str] = "pil",
|
164 |
-
return_dict: bool = True,
|
165 |
-
callback: Optional[Callable[[int, int, torch.FloatTensor], None]] = None,
|
166 |
-
callback_steps: int = 1,
|
167 |
-
text_embeddings: Optional[torch.FloatTensor] = None,
|
168 |
-
**kwargs,
|
169 |
-
):
|
170 |
-
r"""
|
171 |
-
Function invoked when calling the pipeline for generation.
|
172 |
-
|
173 |
-
Args:
|
174 |
-
prompt (`str` or `List[str]`, *optional*, defaults to `None`):
|
175 |
-
The prompt or prompts to guide the image generation. If not provided, `text_embeddings` is required.
|
176 |
-
height (`int`, *optional*, defaults to 512):
|
177 |
-
The height in pixels of the generated image.
|
178 |
-
width (`int`, *optional*, defaults to 512):
|
179 |
-
The width in pixels of the generated image.
|
180 |
-
num_inference_steps (`int`, *optional*, defaults to 50):
|
181 |
-
The number of denoising steps. More denoising steps usually lead to a higher quality image at the
|
182 |
-
expense of slower inference.
|
183 |
-
guidance_scale (`float`, *optional*, defaults to 7.5):
|
184 |
-
Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
|
185 |
-
`guidance_scale` is defined as `w` of equation 2. of [Imagen
|
186 |
-
Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
|
187 |
-
1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,
|
188 |
-
usually at the expense of lower image quality.
|
189 |
-
negative_prompt (`str` or `List[str]`, *optional*):
|
190 |
-
The prompt or prompts not to guide the image generation. Ignored when not using guidance (i.e., ignored
|
191 |
-
if `guidance_scale` is less than `1`).
|
192 |
-
num_images_per_prompt (`int`, *optional*, defaults to 1):
|
193 |
-
The number of images to generate per prompt.
|
194 |
-
eta (`float`, *optional*, defaults to 0.0):
|
195 |
-
Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
|
196 |
-
[`schedulers.DDIMScheduler`], will be ignored for others.
|
197 |
-
generator (`torch.Generator`, *optional*):
|
198 |
-
A [torch generator](https://pytorch.org/docs/stable/generated/torch.Generator.html) to make generation
|
199 |
-
deterministic.
|
200 |
-
latents (`torch.FloatTensor`, *optional*):
|
201 |
-
Pre-generated noisy latents, sampled from a Gaussian distribution, to be used as inputs for image
|
202 |
-
generation. Can be used to tweak the same generation with different prompts. If not provided, a latents
|
203 |
-
tensor will ge generated by sampling using the supplied random `generator`.
|
204 |
-
output_type (`str`, *optional*, defaults to `"pil"`):
|
205 |
-
The output format of the generate image. Choose between
|
206 |
-
[PIL](https://pillow.readthedocs.io/en/stable/): `PIL.Image.Image` or `np.array`.
|
207 |
-
return_dict (`bool`, *optional*, defaults to `True`):
|
208 |
-
Whether or not to return a [`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] instead of a
|
209 |
-
plain tuple.
|
210 |
-
callback (`Callable`, *optional*):
|
211 |
-
A function that will be called every `callback_steps` steps during inference. The function will be
|
212 |
-
called with the following arguments: `callback(step: int, timestep: int, latents: torch.FloatTensor)`.
|
213 |
-
callback_steps (`int`, *optional*, defaults to 1):
|
214 |
-
The frequency at which the `callback` function will be called. If not specified, the callback will be
|
215 |
-
called at every step.
|
216 |
-
text_embeddings (`torch.FloatTensor`, *optional*, defaults to `None`):
|
217 |
-
Pre-generated text embeddings to be used as inputs for image generation. Can be used in place of
|
218 |
-
`prompt` to avoid re-computing the embeddings. If not provided, the embeddings will be generated from
|
219 |
-
the supplied `prompt`.
|
220 |
-
|
221 |
-
Returns:
|
222 |
-
[`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] or `tuple`:
|
223 |
-
[`~pipelines.stable_diffusion.StableDiffusionPipelineOutput`] if `return_dict` is True, otherwise a `tuple.
|
224 |
-
When returning a tuple, the first element is a list with the generated images, and the second element is a
|
225 |
-
list of `bool`s denoting whether the corresponding generated image likely represents "not-safe-for-work"
|
226 |
-
(nsfw) content, according to the `safety_checker`.
|
227 |
-
"""
|
228 |
-
|
229 |
-
if height % 8 != 0 or width % 8 != 0:
|
230 |
-
raise ValueError(f"`height` and `width` have to be divisible by 8 but are {height} and {width}.")
|
231 |
-
|
232 |
-
if (callback_steps is None) or (
|
233 |
-
callback_steps is not None and (not isinstance(callback_steps, int) or callback_steps <= 0)
|
234 |
-
):
|
235 |
-
raise ValueError(
|
236 |
-
f"`callback_steps` has to be a positive integer but is {callback_steps} of type"
|
237 |
-
f" {type(callback_steps)}."
|
238 |
-
)
|
239 |
-
|
240 |
-
if text_embeddings is None:
|
241 |
-
if isinstance(prompt, str):
|
242 |
-
batch_size = 1
|
243 |
-
elif isinstance(prompt, list):
|
244 |
-
batch_size = len(prompt)
|
245 |
-
else:
|
246 |
-
raise ValueError(f"`prompt` has to be of type `str` or `list` but is {type(prompt)}")
|
247 |
-
|
248 |
-
# get prompt text embeddings
|
249 |
-
text_inputs = self.tokenizer(
|
250 |
-
prompt,
|
251 |
-
padding="max_length",
|
252 |
-
max_length=self.tokenizer.model_max_length,
|
253 |
-
return_tensors="pt",
|
254 |
-
)
|
255 |
-
text_input_ids = text_inputs.input_ids
|
256 |
-
|
257 |
-
if text_input_ids.shape[-1] > self.tokenizer.model_max_length:
|
258 |
-
removed_text = self.tokenizer.batch_decode(text_input_ids[:, self.tokenizer.model_max_length :])
|
259 |
-
print(
|
260 |
-
"The following part of your input was truncated because CLIP can only handle sequences up to"
|
261 |
-
f" {self.tokenizer.model_max_length} tokens: {removed_text}"
|
262 |
-
)
|
263 |
-
text_input_ids = text_input_ids[:, : self.tokenizer.model_max_length]
|
264 |
-
text_embeddings = self.text_encoder(text_input_ids.to(self.device))[0]
|
265 |
-
else:
|
266 |
-
batch_size = text_embeddings.shape[0]
|
267 |
-
|
268 |
-
# duplicate text embeddings for each generation per prompt, using mps friendly method
|
269 |
-
bs_embed, seq_len, _ = text_embeddings.shape
|
270 |
-
text_embeddings = text_embeddings.repeat(1, num_images_per_prompt, 1)
|
271 |
-
text_embeddings = text_embeddings.view(bs_embed * num_images_per_prompt, seq_len, -1)
|
272 |
-
|
273 |
-
# here `guidance_scale` is defined analog to the guidance weight `w` of equation (2)
|
274 |
-
# of the Imagen paper: https://arxiv.org/pdf/2205.11487.pdf . `guidance_scale = 1`
|
275 |
-
# corresponds to doing no classifier free guidance.
|
276 |
-
do_classifier_free_guidance = guidance_scale > 1.0
|
277 |
-
# get unconditional embeddings for classifier free guidance
|
278 |
-
if do_classifier_free_guidance:
|
279 |
-
uncond_tokens: List[str]
|
280 |
-
if negative_prompt is None:
|
281 |
-
uncond_tokens = [""] * batch_size
|
282 |
-
elif type(prompt) is not type(negative_prompt):
|
283 |
-
raise TypeError(
|
284 |
-
f"`negative_prompt` should be the same type to `prompt`, but got {type(negative_prompt)} !="
|
285 |
-
f" {type(prompt)}."
|
286 |
-
)
|
287 |
-
elif isinstance(negative_prompt, str):
|
288 |
-
uncond_tokens = [negative_prompt]
|
289 |
-
elif batch_size != len(negative_prompt):
|
290 |
-
raise ValueError(
|
291 |
-
f"`negative_prompt`: {negative_prompt} has batch size {len(negative_prompt)}, but `prompt`:"
|
292 |
-
f" {prompt} has batch size {batch_size}. Please make sure that passed `negative_prompt` matches"
|
293 |
-
" the batch size of `prompt`."
|
294 |
-
)
|
295 |
-
else:
|
296 |
-
uncond_tokens = negative_prompt
|
297 |
-
|
298 |
-
max_length = self.tokenizer.model_max_length
|
299 |
-
uncond_input = self.tokenizer(
|
300 |
-
uncond_tokens,
|
301 |
-
padding="max_length",
|
302 |
-
max_length=max_length,
|
303 |
-
truncation=True,
|
304 |
-
return_tensors="pt",
|
305 |
-
)
|
306 |
-
uncond_embeddings = self.text_encoder(uncond_input.input_ids.to(self.device))[0]
|
307 |
-
|
308 |
-
# duplicate unconditional embeddings for each generation per prompt, using mps friendly method
|
309 |
-
seq_len = uncond_embeddings.shape[1]
|
310 |
-
uncond_embeddings = uncond_embeddings.repeat(1, num_images_per_prompt, 1)
|
311 |
-
uncond_embeddings = uncond_embeddings.view(batch_size * num_images_per_prompt, seq_len, -1)
|
312 |
-
|
313 |
-
# For classifier free guidance, we need to do two forward passes.
|
314 |
-
# Here we concatenate the unconditional and text embeddings into a single batch
|
315 |
-
# to avoid doing two forward passes
|
316 |
-
text_embeddings = torch.cat([uncond_embeddings, text_embeddings])
|
317 |
-
|
318 |
-
# get the initial random noise unless the user supplied it
|
319 |
-
|
320 |
-
# Unlike in other pipelines, latents need to be generated in the target device
|
321 |
-
# for 1-to-1 results reproducibility with the CompVis implementation.
|
322 |
-
# However this currently doesn't work in `mps`.
|
323 |
-
latents_shape = (batch_size * num_images_per_prompt, self.unet.config.in_channels, height // 8, width // 8)
|
324 |
-
latents_dtype = text_embeddings.dtype
|
325 |
-
if latents is None:
|
326 |
-
if self.device.type == "mps":
|
327 |
-
# randn does not work reproducibly on mps
|
328 |
-
latents = torch.randn(latents_shape, generator=generator, device="cpu", dtype=latents_dtype).to(
|
329 |
-
self.device
|
330 |
-
)
|
331 |
-
else:
|
332 |
-
latents = torch.randn(latents_shape, generator=generator, device=self.device, dtype=latents_dtype)
|
333 |
-
else:
|
334 |
-
if latents.shape != latents_shape:
|
335 |
-
raise ValueError(f"Unexpected latents shape, got {latents.shape}, expected {latents_shape}")
|
336 |
-
latents = latents.to(self.device)
|
337 |
-
|
338 |
-
# set timesteps
|
339 |
-
self.scheduler.set_timesteps(num_inference_steps)
|
340 |
-
|
341 |
-
# Some schedulers like PNDM have timesteps as arrays
|
342 |
-
# It's more optimized to move all timesteps to correct device beforehand
|
343 |
-
timesteps_tensor = self.scheduler.timesteps.to(self.device)
|
344 |
-
|
345 |
-
# scale the initial noise by the standard deviation required by the scheduler
|
346 |
-
latents = latents * self.scheduler.init_noise_sigma
|
347 |
-
|
348 |
-
# prepare extra kwargs for the scheduler step, since not all schedulers have the same signature
|
349 |
-
# eta (η) is only used with the DDIMScheduler, it will be ignored for other schedulers.
|
350 |
-
# eta corresponds to η in DDIM paper: https://arxiv.org/abs/2010.02502
|
351 |
-
# and should be between [0, 1]
|
352 |
-
accepts_eta = "eta" in set(inspect.signature(self.scheduler.step).parameters.keys())
|
353 |
-
extra_step_kwargs = {}
|
354 |
-
if accepts_eta:
|
355 |
-
extra_step_kwargs["eta"] = eta
|
356 |
-
|
357 |
-
for i, t in enumerate(self.progress_bar(timesteps_tensor)):
|
358 |
-
# expand the latents if we are doing classifier free guidance
|
359 |
-
latent_model_input = torch.cat([latents] * 2) if do_classifier_free_guidance else latents
|
360 |
-
latent_model_input = self.scheduler.scale_model_input(latent_model_input, t)
|
361 |
-
|
362 |
-
# predict the noise residual
|
363 |
-
noise_pred = self.unet(latent_model_input, t, encoder_hidden_states=text_embeddings).sample
|
364 |
-
|
365 |
-
# perform guidance
|
366 |
-
if do_classifier_free_guidance:
|
367 |
-
noise_pred_uncond, noise_pred_text = noise_pred.chunk(2)
|
368 |
-
noise_pred = noise_pred_uncond + guidance_scale * (noise_pred_text - noise_pred_uncond)
|
369 |
-
|
370 |
-
# compute the previous noisy sample x_t -> x_t-1
|
371 |
-
latents = self.scheduler.step(noise_pred, t, latents, **extra_step_kwargs).prev_sample
|
372 |
-
|
373 |
-
# call the callback, if provided
|
374 |
-
if callback is not None and i % callback_steps == 0:
|
375 |
-
callback(i, t, latents)
|
376 |
-
|
377 |
-
latents = 1 / 0.18215 * latents
|
378 |
-
image = self.vae.decode(latents).sample
|
379 |
-
|
380 |
-
image = (image / 2 + 0.5).clamp(0, 1)
|
381 |
-
|
382 |
-
# we always cast to float32 as this does not cause significant overhead and is compatible with bfloat16
|
383 |
-
image = image.cpu().permute(0, 2, 3, 1).float().numpy()
|
384 |
-
|
385 |
-
if self.safety_checker is not None:
|
386 |
-
safety_checker_input = self.feature_extractor(self.numpy_to_pil(image), return_tensors="pt").to(
|
387 |
-
self.device
|
388 |
-
)
|
389 |
-
image, has_nsfw_concept = self.safety_checker(
|
390 |
-
images=image, clip_input=safety_checker_input.pixel_values.to(text_embeddings.dtype)
|
391 |
-
)
|
392 |
-
else:
|
393 |
-
has_nsfw_concept = None
|
394 |
-
|
395 |
-
if output_type == "pil":
|
396 |
-
image = self.numpy_to_pil(image)
|
397 |
-
|
398 |
-
if not return_dict:
|
399 |
-
return (image, has_nsfw_concept)
|
400 |
-
|
401 |
-
return StableDiffusionPipelineOutput(images=image, nsfw_content_detected=has_nsfw_concept)
|
402 |
-
|
403 |
-
def embed_text(self, text):
|
404 |
-
"""takes in text and turns it into text embeddings"""
|
405 |
-
text_input = self.tokenizer(
|
406 |
-
text,
|
407 |
-
padding="max_length",
|
408 |
-
max_length=self.tokenizer.model_max_length,
|
409 |
-
truncation=True,
|
410 |
-
return_tensors="pt",
|
411 |
-
)
|
412 |
-
with torch.no_grad():
|
413 |
-
embed = self.text_encoder(text_input.input_ids.to(self.device))[0]
|
414 |
-
return embed
|
415 |
-
|
416 |
-
def get_noise(self, seed, dtype=torch.float32, height=512, width=512):
|
417 |
-
"""Takes in random seed and returns corresponding noise vector"""
|
418 |
-
return torch.randn(
|
419 |
-
(1, self.unet.config.in_channels, height // 8, width // 8),
|
420 |
-
generator=torch.Generator(device=self.device).manual_seed(seed),
|
421 |
-
device=self.device,
|
422 |
-
dtype=dtype,
|
423 |
-
)
|
424 |
-
|
425 |
-
def walk(
|
426 |
-
self,
|
427 |
-
prompts: List[str],
|
428 |
-
seeds: List[int],
|
429 |
-
num_interpolation_steps: Optional[int] = 6,
|
430 |
-
output_dir: Optional[str] = "./dreams",
|
431 |
-
name: Optional[str] = None,
|
432 |
-
batch_size: Optional[int] = 1,
|
433 |
-
height: Optional[int] = 512,
|
434 |
-
width: Optional[int] = 512,
|
435 |
-
guidance_scale: Optional[float] = 7.5,
|
436 |
-
num_inference_steps: Optional[int] = 50,
|
437 |
-
eta: Optional[float] = 0.0,
|
438 |
-
) -> List[str]:
|
439 |
-
"""
|
440 |
-
Walks through a series of prompts and seeds, interpolating between them and saving the results to disk.
|
441 |
-
|
442 |
-
Args:
|
443 |
-
prompts (`List[str]`):
|
444 |
-
List of prompts to generate images for.
|
445 |
-
seeds (`List[int]`):
|
446 |
-
List of seeds corresponding to provided prompts. Must be the same length as prompts.
|
447 |
-
num_interpolation_steps (`int`, *optional*, defaults to 6):
|
448 |
-
Number of interpolation steps to take between prompts.
|
449 |
-
output_dir (`str`, *optional*, defaults to `./dreams`):
|
450 |
-
Directory to save the generated images to.
|
451 |
-
name (`str`, *optional*, defaults to `None`):
|
452 |
-
Subdirectory of `output_dir` to save the generated images to. If `None`, the name will
|
453 |
-
be the current time.
|
454 |
-
batch_size (`int`, *optional*, defaults to 1):
|
455 |
-
Number of images to generate at once.
|
456 |
-
height (`int`, *optional*, defaults to 512):
|
457 |
-
Height of the generated images.
|
458 |
-
width (`int`, *optional*, defaults to 512):
|
459 |
-
Width of the generated images.
|
460 |
-
guidance_scale (`float`, *optional*, defaults to 7.5):
|
461 |
-
Guidance scale as defined in [Classifier-Free Diffusion Guidance](https://arxiv.org/abs/2207.12598).
|
462 |
-
`guidance_scale` is defined as `w` of equation 2. of [Imagen
|
463 |
-
Paper](https://arxiv.org/pdf/2205.11487.pdf). Guidance scale is enabled by setting `guidance_scale >
|
464 |
-
1`. Higher guidance scale encourages to generate images that are closely linked to the text `prompt`,
|
465 |
-
usually at the expense of lower image quality.
|
466 |
-
num_inference_steps (`int`, *optional*, defaults to 50):
|
467 |
-
The number of denoising steps. More denoising steps usually lead to a higher quality image at the
|
468 |
-
expense of slower inference.
|
469 |
-
eta (`float`, *optional*, defaults to 0.0):
|
470 |
-
Corresponds to parameter eta (η) in the DDIM paper: https://arxiv.org/abs/2010.02502. Only applies to
|
471 |
-
[`schedulers.DDIMScheduler`], will be ignored for others.
|
472 |
-
|
473 |
-
Returns:
|
474 |
-
`List[str]`: List of paths to the generated images.
|
475 |
-
"""
|
476 |
-
if not len(prompts) == len(seeds):
|
477 |
-
raise ValueError(
|
478 |
-
f"Number of prompts and seeds must be equalGot {len(prompts)} prompts and {len(seeds)} seeds"
|
479 |
-
)
|
480 |
-
|
481 |
-
name = name or time.strftime("%Y%m%d-%H%M%S")
|
482 |
-
save_path = Path(output_dir) / name
|
483 |
-
save_path.mkdir(exist_ok=True, parents=True)
|
484 |
-
|
485 |
-
frame_idx = 0
|
486 |
-
frame_filepaths = []
|
487 |
-
for prompt_a, prompt_b, seed_a, seed_b in zip(prompts, prompts[1:], seeds, seeds[1:]):
|
488 |
-
# Embed Text
|
489 |
-
embed_a = self.embed_text(prompt_a)
|
490 |
-
embed_b = self.embed_text(prompt_b)
|
491 |
-
|
492 |
-
# Get Noise
|
493 |
-
noise_dtype = embed_a.dtype
|
494 |
-
noise_a = self.get_noise(seed_a, noise_dtype, height, width)
|
495 |
-
noise_b = self.get_noise(seed_b, noise_dtype, height, width)
|
496 |
-
|
497 |
-
noise_batch, embeds_batch = None, None
|
498 |
-
T = np.linspace(0.0, 1.0, num_interpolation_steps)
|
499 |
-
for i, t in enumerate(T):
|
500 |
-
noise = slerp(float(t), noise_a, noise_b)
|
501 |
-
embed = torch.lerp(embed_a, embed_b, t)
|
502 |
-
|
503 |
-
noise_batch = noise if noise_batch is None else torch.cat([noise_batch, noise], dim=0)
|
504 |
-
embeds_batch = embed if embeds_batch is None else torch.cat([embeds_batch, embed], dim=0)
|
505 |
-
|
506 |
-
batch_is_ready = embeds_batch.shape[0] == batch_size or i + 1 == T.shape[0]
|
507 |
-
if batch_is_ready:
|
508 |
-
outputs = self(
|
509 |
-
latents=noise_batch,
|
510 |
-
text_embeddings=embeds_batch,
|
511 |
-
height=height,
|
512 |
-
width=width,
|
513 |
-
guidance_scale=guidance_scale,
|
514 |
-
eta=eta,
|
515 |
-
num_inference_steps=num_inference_steps,
|
516 |
-
)
|
517 |
-
noise_batch, embeds_batch = None, None
|
518 |
-
|
519 |
-
for image in outputs["images"]:
|
520 |
-
frame_filepath = str(save_path / f"frame_{frame_idx:06d}.png")
|
521 |
-
image.save(frame_filepath)
|
522 |
-
frame_filepaths.append(frame_filepath)
|
523 |
-
frame_idx += 1
|
524 |
-
return frame_filepaths
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Androidonnxfork/CivitAi-to-Diffusers/diffusers/src/diffusers/pipelines/vq_diffusion/__init__.py
DELETED
@@ -1,10 +0,0 @@
|
|
1 |
-
from ...utils import OptionalDependencyNotAvailable, is_torch_available, is_transformers_available
|
2 |
-
|
3 |
-
|
4 |
-
try:
|
5 |
-
if not (is_transformers_available() and is_torch_available()):
|
6 |
-
raise OptionalDependencyNotAvailable()
|
7 |
-
except OptionalDependencyNotAvailable:
|
8 |
-
from ...utils.dummy_torch_and_transformers_objects import *
|
9 |
-
else:
|
10 |
-
from .pipeline_vq_diffusion import LearnedClassifierFreeSamplingEmbeddings, VQDiffusionPipeline
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Andy1621/uniformer_image_segmentation/configs/fcn/fcn_r101-d8_512x1024_40k_cityscapes.py
DELETED
@@ -1,2 +0,0 @@
|
|
1 |
-
_base_ = './fcn_r50-d8_512x1024_40k_cityscapes.py'
|
2 |
-
model = dict(pretrained='open-mmlab://resnet101_v1c', backbone=dict(depth=101))
|
|
|
|
|
|
spaces/Andy1621/uniformer_image_segmentation/configs/hrnet/fcn_hr18s_480x480_40k_pascal_context_59.py
DELETED
@@ -1,9 +0,0 @@
|
|
1 |
-
_base_ = './fcn_hr18_480x480_40k_pascal_context_59.py'
|
2 |
-
model = dict(
|
3 |
-
pretrained='open-mmlab://msra/hrnetv2_w18_small',
|
4 |
-
backbone=dict(
|
5 |
-
extra=dict(
|
6 |
-
stage1=dict(num_blocks=(2, )),
|
7 |
-
stage2=dict(num_blocks=(2, 2)),
|
8 |
-
stage3=dict(num_modules=3, num_blocks=(2, 2, 2)),
|
9 |
-
stage4=dict(num_modules=2, num_blocks=(2, 2, 2, 2)))))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AnishKumbhar/ChatBot/text-generation-webui-main/extensions/openai/cache_embedding_model.py
DELETED
@@ -1,11 +0,0 @@
|
|
1 |
-
#!/usr/bin/env python3
|
2 |
-
# preload the embedding model, useful for Docker images to prevent re-download on config change
|
3 |
-
# Dockerfile:
|
4 |
-
# ENV OPENEDAI_EMBEDDING_MODEL=all-mpnet-base-v2 # Optional
|
5 |
-
# RUN python3 cache_embedded_model.py
|
6 |
-
import os
|
7 |
-
|
8 |
-
import sentence_transformers
|
9 |
-
|
10 |
-
st_model = os.environ.get("OPENEDAI_EMBEDDING_MODEL", "all-mpnet-base-v2")
|
11 |
-
model = sentence_transformers.SentenceTransformer(st_model)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AnishKumbhar/ChatBot/text-generation-webui-main/start_windows.bat
DELETED
@@ -1,84 +0,0 @@
|
|
1 |
-
@echo off
|
2 |
-
|
3 |
-
cd /D "%~dp0"
|
4 |
-
|
5 |
-
set PATH=%PATH%;%SystemRoot%\system32
|
6 |
-
|
7 |
-
echo "%CD%"| findstr /C:" " >nul && echo This script relies on Miniconda which can not be silently installed under a path with spaces. && goto end
|
8 |
-
|
9 |
-
@rem Check for special characters in installation path
|
10 |
-
set "SPCHARMESSAGE="WARNING: Special characters were detected in the installation path!" " This can cause the installation to fail!""
|
11 |
-
echo "%CD%"| findstr /R /C:"[!#\$%&()\*+,;<=>?@\[\]\^`{|}~]" >nul && (
|
12 |
-
call :PrintBigMessage %SPCHARMESSAGE%
|
13 |
-
)
|
14 |
-
set SPCHARMESSAGE=
|
15 |
-
|
16 |
-
@rem fix failed install when installing to a separate drive
|
17 |
-
set TMP=%cd%\installer_files
|
18 |
-
set TEMP=%cd%\installer_files
|
19 |
-
|
20 |
-
@rem deactivate existing conda envs as needed to avoid conflicts
|
21 |
-
(call conda deactivate && call conda deactivate && call conda deactivate) 2>nul
|
22 |
-
|
23 |
-
@rem config
|
24 |
-
set INSTALL_DIR=%cd%\installer_files
|
25 |
-
set CONDA_ROOT_PREFIX=%cd%\installer_files\conda
|
26 |
-
set INSTALL_ENV_DIR=%cd%\installer_files\env
|
27 |
-
set MINICONDA_DOWNLOAD_URL=https://repo.anaconda.com/miniconda/Miniconda3-py310_23.3.1-0-Windows-x86_64.exe
|
28 |
-
set conda_exists=F
|
29 |
-
|
30 |
-
@rem figure out whether git and conda needs to be installed
|
31 |
-
call "%CONDA_ROOT_PREFIX%\_conda.exe" --version >nul 2>&1
|
32 |
-
if "%ERRORLEVEL%" EQU "0" set conda_exists=T
|
33 |
-
|
34 |
-
@rem (if necessary) install git and conda into a contained environment
|
35 |
-
@rem download conda
|
36 |
-
if "%conda_exists%" == "F" (
|
37 |
-
echo Downloading Miniconda from %MINICONDA_DOWNLOAD_URL% to %INSTALL_DIR%\miniconda_installer.exe
|
38 |
-
|
39 |
-
mkdir "%INSTALL_DIR%"
|
40 |
-
call curl -Lk "%MINICONDA_DOWNLOAD_URL%" > "%INSTALL_DIR%\miniconda_installer.exe" || ( echo. && echo Miniconda failed to download. && goto end )
|
41 |
-
|
42 |
-
echo Installing Miniconda to %CONDA_ROOT_PREFIX%
|
43 |
-
start /wait "" "%INSTALL_DIR%\miniconda_installer.exe" /InstallationType=JustMe /NoShortcuts=1 /AddToPath=0 /RegisterPython=0 /NoRegistry=1 /S /D=%CONDA_ROOT_PREFIX%
|
44 |
-
|
45 |
-
@rem test the conda binary
|
46 |
-
echo Miniconda version:
|
47 |
-
call "%CONDA_ROOT_PREFIX%\_conda.exe" --version || ( echo. && echo Miniconda not found. && goto end )
|
48 |
-
)
|
49 |
-
|
50 |
-
@rem create the installer env
|
51 |
-
if not exist "%INSTALL_ENV_DIR%" (
|
52 |
-
echo Packages to install: %PACKAGES_TO_INSTALL%
|
53 |
-
call "%CONDA_ROOT_PREFIX%\_conda.exe" create --no-shortcuts -y -k --prefix "%INSTALL_ENV_DIR%" python=3.10 || ( echo. && echo Conda environment creation failed. && goto end )
|
54 |
-
)
|
55 |
-
|
56 |
-
@rem check if conda environment was actually created
|
57 |
-
if not exist "%INSTALL_ENV_DIR%\python.exe" ( echo. && echo Conda environment is empty. && goto end )
|
58 |
-
|
59 |
-
@rem environment isolation
|
60 |
-
set PYTHONNOUSERSITE=1
|
61 |
-
set PYTHONPATH=
|
62 |
-
set PYTHONHOME=
|
63 |
-
set "CUDA_PATH=%INSTALL_ENV_DIR%"
|
64 |
-
set "CUDA_HOME=%CUDA_PATH%"
|
65 |
-
|
66 |
-
@rem activate installer env
|
67 |
-
call "%CONDA_ROOT_PREFIX%\condabin\conda.bat" activate "%INSTALL_ENV_DIR%" || ( echo. && echo Miniconda hook not found. && goto end )
|
68 |
-
|
69 |
-
@rem setup installer env
|
70 |
-
call python one_click.py %*
|
71 |
-
|
72 |
-
@rem below are functions for the script next line skips these during normal execution
|
73 |
-
goto end
|
74 |
-
|
75 |
-
:PrintBigMessage
|
76 |
-
echo. && echo.
|
77 |
-
echo *******************************************************************
|
78 |
-
for %%M in (%*) do echo * %%~M
|
79 |
-
echo *******************************************************************
|
80 |
-
echo. && echo.
|
81 |
-
exit /b
|
82 |
-
|
83 |
-
:end
|
84 |
-
pause
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/ArchitSharma/Digital-Photo-Color-Restoration/README.md
DELETED
@@ -1,13 +0,0 @@
|
|
1 |
-
---
|
2 |
-
title: Digital Photo Color Restoration
|
3 |
-
emoji: 📚
|
4 |
-
colorFrom: yellow
|
5 |
-
colorTo: blue
|
6 |
-
sdk: streamlit
|
7 |
-
sdk_version: 1.19.0
|
8 |
-
app_file: app.py
|
9 |
-
python_version: 3.9.17
|
10 |
-
pinned: false
|
11 |
-
---
|
12 |
-
|
13 |
-
Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AriaMei/TTSdemo/text/cleaners.py
DELETED
@@ -1,177 +0,0 @@
|
|
1 |
-
import re
|
2 |
-
|
3 |
-
from text.japanese import japanese_to_romaji_with_accent, japanese_to_ipa, japanese_to_ipa2, japanese_to_ipa3
|
4 |
-
from text.mandarin import number_to_chinese, chinese_to_bopomofo, latin_to_bopomofo, chinese_to_romaji, chinese_to_lazy_ipa, chinese_to_ipa, chinese_to_ipa2
|
5 |
-
|
6 |
-
# from text.sanskrit import devanagari_to_ipa
|
7 |
-
# from text.english import english_to_lazy_ipa, english_to_ipa2, english_to_lazy_ipa2
|
8 |
-
# from text.thai import num_to_thai, latin_to_thai
|
9 |
-
# from text.shanghainese import shanghainese_to_ipa
|
10 |
-
# from text.cantonese import cantonese_to_ipa
|
11 |
-
# from text.ngu_dialect import ngu_dialect_to_ipa
|
12 |
-
|
13 |
-
|
14 |
-
def japanese_cleaners(text):
|
15 |
-
text = japanese_to_romaji_with_accent(text)
|
16 |
-
if re.match('[A-Za-z]', text[-1]):
|
17 |
-
text += '.'
|
18 |
-
return text
|
19 |
-
|
20 |
-
|
21 |
-
def japanese_cleaners2(text):
|
22 |
-
return japanese_cleaners(text).replace('ts', 'ʦ').replace('...', '…')
|
23 |
-
|
24 |
-
|
25 |
-
def korean_cleaners(text):
|
26 |
-
'''Pipeline for Korean text'''
|
27 |
-
text = latin_to_hangul(text)
|
28 |
-
text = number_to_hangul(text)
|
29 |
-
text = divide_hangul(text)
|
30 |
-
if re.match('[\u3131-\u3163]', text[-1]):
|
31 |
-
text += '.'
|
32 |
-
return text
|
33 |
-
|
34 |
-
|
35 |
-
def chinese_cleaners(text):
|
36 |
-
'''Pipeline for Chinese text'''
|
37 |
-
text = number_to_chinese(text)
|
38 |
-
text = chinese_to_bopomofo(text)
|
39 |
-
text = latin_to_bopomofo(text)
|
40 |
-
if re.match('[ˉˊˇˋ˙]', text[-1]):
|
41 |
-
text += '。'
|
42 |
-
return text
|
43 |
-
|
44 |
-
|
45 |
-
def zh_ja_mixture_cleaners(text):
|
46 |
-
chinese_texts = re.findall(r'\[ZH\].*?\[ZH\]', text)
|
47 |
-
japanese_texts = re.findall(r'\[JA\].*?\[JA\]', text)
|
48 |
-
for chinese_text in chinese_texts:
|
49 |
-
cleaned_text = chinese_to_romaji(chinese_text[4:-4])
|
50 |
-
text = text.replace(chinese_text, cleaned_text+' ', 1)
|
51 |
-
for japanese_text in japanese_texts:
|
52 |
-
cleaned_text = japanese_to_romaji_with_accent(
|
53 |
-
japanese_text[4:-4]).replace('ts', 'ʦ').replace('u', 'ɯ').replace('...', '…')
|
54 |
-
text = text.replace(japanese_text, cleaned_text+' ', 1)
|
55 |
-
text = text[:-1]
|
56 |
-
if re.match('[A-Za-zɯɹəɥ→↓↑]', text[-1]):
|
57 |
-
text += '.'
|
58 |
-
return text
|
59 |
-
|
60 |
-
|
61 |
-
def sanskrit_cleaners(text):
|
62 |
-
text = text.replace('॥', '।').replace('ॐ', 'ओम्')
|
63 |
-
if text[-1] != '।':
|
64 |
-
text += ' ।'
|
65 |
-
return text
|
66 |
-
|
67 |
-
|
68 |
-
def cjks_cleaners(text):
|
69 |
-
chinese_texts = re.findall(r'\[ZH\].*?\[ZH\]', text)
|
70 |
-
japanese_texts = re.findall(r'\[JA\].*?\[JA\]', text)
|
71 |
-
korean_texts = re.findall(r'\[KO\].*?\[KO\]', text)
|
72 |
-
sanskrit_texts = re.findall(r'\[SA\].*?\[SA\]', text)
|
73 |
-
english_texts = re.findall(r'\[EN\].*?\[EN\]', text)
|
74 |
-
for chinese_text in chinese_texts:
|
75 |
-
cleaned_text = chinese_to_lazy_ipa(chinese_text[4:-4])
|
76 |
-
text = text.replace(chinese_text, cleaned_text+' ', 1)
|
77 |
-
for japanese_text in japanese_texts:
|
78 |
-
cleaned_text = japanese_to_ipa(japanese_text[4:-4])
|
79 |
-
text = text.replace(japanese_text, cleaned_text+' ', 1)
|
80 |
-
for korean_text in korean_texts:
|
81 |
-
cleaned_text = korean_to_lazy_ipa(korean_text[4:-4])
|
82 |
-
text = text.replace(korean_text, cleaned_text+' ', 1)
|
83 |
-
for sanskrit_text in sanskrit_texts:
|
84 |
-
cleaned_text = devanagari_to_ipa(sanskrit_text[4:-4])
|
85 |
-
text = text.replace(sanskrit_text, cleaned_text+' ', 1)
|
86 |
-
for english_text in english_texts:
|
87 |
-
cleaned_text = english_to_lazy_ipa(english_text[4:-4])
|
88 |
-
text = text.replace(english_text, cleaned_text+' ', 1)
|
89 |
-
text = text[:-1]
|
90 |
-
if re.match(r'[^\.,!\?\-…~]', text[-1]):
|
91 |
-
text += '.'
|
92 |
-
return text
|
93 |
-
|
94 |
-
|
95 |
-
def cjke_cleaners(text):
|
96 |
-
chinese_texts = re.findall(r'\[ZH\].*?\[ZH\]', text)
|
97 |
-
japanese_texts = re.findall(r'\[JA\].*?\[JA\]', text)
|
98 |
-
korean_texts = re.findall(r'\[KO\].*?\[KO\]', text)
|
99 |
-
english_texts = re.findall(r'\[EN\].*?\[EN\]', text)
|
100 |
-
for chinese_text in chinese_texts:
|
101 |
-
cleaned_text = chinese_to_lazy_ipa(chinese_text[4:-4])
|
102 |
-
cleaned_text = cleaned_text.replace(
|
103 |
-
'ʧ', 'tʃ').replace('ʦ', 'ts').replace('ɥan', 'ɥæn')
|
104 |
-
text = text.replace(chinese_text, cleaned_text+' ', 1)
|
105 |
-
for japanese_text in japanese_texts:
|
106 |
-
cleaned_text = japanese_to_ipa(japanese_text[4:-4])
|
107 |
-
cleaned_text = cleaned_text.replace('ʧ', 'tʃ').replace(
|
108 |
-
'ʦ', 'ts').replace('ɥan', 'ɥæn').replace('ʥ', 'dz')
|
109 |
-
text = text.replace(japanese_text, cleaned_text+' ', 1)
|
110 |
-
for korean_text in korean_texts:
|
111 |
-
cleaned_text = korean_to_ipa(korean_text[4:-4])
|
112 |
-
text = text.replace(korean_text, cleaned_text+' ', 1)
|
113 |
-
for english_text in english_texts:
|
114 |
-
cleaned_text = english_to_ipa2(english_text[4:-4])
|
115 |
-
cleaned_text = cleaned_text.replace('ɑ', 'a').replace(
|
116 |
-
'ɔ', 'o').replace('ɛ', 'e').replace('ɪ', 'i').replace('ʊ', 'u')
|
117 |
-
text = text.replace(english_text, cleaned_text+' ', 1)
|
118 |
-
text = text[:-1]
|
119 |
-
if re.match(r'[^\.,!\?\-…~]', text[-1]):
|
120 |
-
text += '.'
|
121 |
-
return text
|
122 |
-
|
123 |
-
|
124 |
-
def cjke_cleaners2(text):
|
125 |
-
chinese_texts = re.findall(r'\[ZH\].*?\[ZH\]', text)
|
126 |
-
japanese_texts = re.findall(r'\[JA\].*?\[JA\]', text)
|
127 |
-
korean_texts = re.findall(r'\[KO\].*?\[KO\]', text)
|
128 |
-
english_texts = re.findall(r'\[EN\].*?\[EN\]', text)
|
129 |
-
for chinese_text in chinese_texts:
|
130 |
-
cleaned_text = chinese_to_ipa(chinese_text[4:-4])
|
131 |
-
text = text.replace(chinese_text, cleaned_text+' ', 1)
|
132 |
-
for japanese_text in japanese_texts:
|
133 |
-
cleaned_text = japanese_to_ipa2(japanese_text[4:-4])
|
134 |
-
text = text.replace(japanese_text, cleaned_text+' ', 1)
|
135 |
-
for korean_text in korean_texts:
|
136 |
-
cleaned_text = korean_to_ipa(korean_text[4:-4])
|
137 |
-
text = text.replace(korean_text, cleaned_text+' ', 1)
|
138 |
-
for english_text in english_texts:
|
139 |
-
cleaned_text = english_to_ipa2(english_text[4:-4])
|
140 |
-
text = text.replace(english_text, cleaned_text+' ', 1)
|
141 |
-
text = text[:-1]
|
142 |
-
if re.match(r'[^\.,!\?\-…~]', text[-1]):
|
143 |
-
text += '.'
|
144 |
-
return text
|
145 |
-
|
146 |
-
|
147 |
-
def thai_cleaners(text):
|
148 |
-
text = num_to_thai(text)
|
149 |
-
text = latin_to_thai(text)
|
150 |
-
return text
|
151 |
-
|
152 |
-
|
153 |
-
def shanghainese_cleaners(text):
|
154 |
-
text = shanghainese_to_ipa(text)
|
155 |
-
if re.match(r'[^\.,!\?\-…~]', text[-1]):
|
156 |
-
text += '.'
|
157 |
-
return text
|
158 |
-
|
159 |
-
|
160 |
-
def chinese_dialect_cleaners(text):
|
161 |
-
text = re.sub(r'\[MD\](.*?)\[MD\]',
|
162 |
-
lambda x: chinese_to_ipa2(x.group(1))+' ', text)
|
163 |
-
text = re.sub(r'\[TW\](.*?)\[TW\]',
|
164 |
-
lambda x: chinese_to_ipa2(x.group(1), True)+' ', text)
|
165 |
-
text = re.sub(r'\[JA\](.*?)\[JA\]',
|
166 |
-
lambda x: japanese_to_ipa3(x.group(1)).replace('Q', 'ʔ')+' ', text)
|
167 |
-
text = re.sub(r'\[SH\](.*?)\[SH\]', lambda x: shanghainese_to_ipa(x.group(1)).replace('1', '˥˧').replace('5',
|
168 |
-
'˧˧˦').replace('6', '˩˩˧').replace('7', '˥').replace('8', '˩˨').replace('ᴀ', 'ɐ').replace('ᴇ', 'e')+' ', text)
|
169 |
-
text = re.sub(r'\[GD\](.*?)\[GD\]',
|
170 |
-
lambda x: cantonese_to_ipa(x.group(1))+' ', text)
|
171 |
-
text = re.sub(r'\[EN\](.*?)\[EN\]',
|
172 |
-
lambda x: english_to_lazy_ipa2(x.group(1))+' ', text)
|
173 |
-
text = re.sub(r'\[([A-Z]{2})\](.*?)\[\1\]', lambda x: ngu_dialect_to_ipa(x.group(2), x.group(
|
174 |
-
1)).replace('ʣ', 'dz').replace('ʥ', 'dʑ').replace('ʦ', 'ts').replace('ʨ', 'tɕ')+' ', text)
|
175 |
-
text = re.sub(r'\s+$', '', text)
|
176 |
-
text = re.sub(r'([^\.,!\?\-…~])$', r'\1.', text)
|
177 |
-
return text
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Ariharasudhan/YoloV5/utils/aws/userdata.sh
DELETED
@@ -1,27 +0,0 @@
|
|
1 |
-
#!/bin/bash
|
2 |
-
# AWS EC2 instance startup script https://docs.aws.amazon.com/AWSEC2/latest/UserGuide/user-data.html
|
3 |
-
# This script will run only once on first instance start (for a re-start script see mime.sh)
|
4 |
-
# /home/ubuntu (ubuntu) or /home/ec2-user (amazon-linux) is working dir
|
5 |
-
# Use >300 GB SSD
|
6 |
-
|
7 |
-
cd home/ubuntu
|
8 |
-
if [ ! -d yolov5 ]; then
|
9 |
-
echo "Running first-time script." # install dependencies, download COCO, pull Docker
|
10 |
-
git clone https://github.com/ultralytics/yolov5 -b master && sudo chmod -R 777 yolov5
|
11 |
-
cd yolov5
|
12 |
-
bash data/scripts/get_coco.sh && echo "COCO done." &
|
13 |
-
sudo docker pull ultralytics/yolov5:latest && echo "Docker done." &
|
14 |
-
python -m pip install --upgrade pip && pip install -r requirements.txt && python detect.py && echo "Requirements done." &
|
15 |
-
wait && echo "All tasks done." # finish background tasks
|
16 |
-
else
|
17 |
-
echo "Running re-start script." # resume interrupted runs
|
18 |
-
i=0
|
19 |
-
list=$(sudo docker ps -qa) # container list i.e. $'one\ntwo\nthree\nfour'
|
20 |
-
while IFS= read -r id; do
|
21 |
-
((i++))
|
22 |
-
echo "restarting container $i: $id"
|
23 |
-
sudo docker start $id
|
24 |
-
# sudo docker exec -it $id python train.py --resume # single-GPU
|
25 |
-
sudo docker exec -d $id python utils/aws/resume.py # multi-scenario
|
26 |
-
done <<<"$list"
|
27 |
-
fi
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/AtlasUnified/DeforumPromptGenerator/app.py
DELETED
@@ -1,33 +0,0 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
|
3 |
-
def generate_sequence(frames_per_second, seconds_per_prompt, *main_prompts):
|
4 |
-
sequence_count = int(frames_per_second) * int(seconds_per_prompt)
|
5 |
-
output = {}
|
6 |
-
|
7 |
-
for prompt_index, main_prompt in enumerate(main_prompts):
|
8 |
-
if main_prompt: # Check if the field has information
|
9 |
-
prompts = main_prompt.split(',')
|
10 |
-
for i, prompt in enumerate(prompts):
|
11 |
-
output[str(prompt_index * sequence_count + i * sequence_count)] = prompt.strip()
|
12 |
-
|
13 |
-
return output
|
14 |
-
|
15 |
-
def stringify_output(output_dict):
|
16 |
-
output_items = [f'"{k}": "{v}"' for k, v in output_dict.items()]
|
17 |
-
return ',\n'.join(output_items)
|
18 |
-
|
19 |
-
frames_per_second = gr.Number(label="Frames per second")
|
20 |
-
seconds_per_prompt = gr.Number(label="Seconds per prompt")
|
21 |
-
|
22 |
-
main_prompts = [gr.Textbox(lines=2, label=f"Main prompt {i+1} (comma-separated)") for i in range(10)]
|
23 |
-
|
24 |
-
output = gr.Textbox(label="Output")
|
25 |
-
|
26 |
-
iface = gr.Interface(
|
27 |
-
fn=lambda fps, spp, *mp: stringify_output(generate_sequence(fps, spp, *mp)),
|
28 |
-
inputs=[frames_per_second, seconds_per_prompt, *main_prompts],
|
29 |
-
outputs=output,
|
30 |
-
title="Deforum Prompt Generator"
|
31 |
-
)
|
32 |
-
|
33 |
-
iface.launch()
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/BartPoint/VoiceChange/util.py
DELETED
@@ -1,81 +0,0 @@
|
|
1 |
-
import sys
|
2 |
-
import asyncio
|
3 |
-
from io import BytesIO
|
4 |
-
|
5 |
-
from fairseq import checkpoint_utils
|
6 |
-
|
7 |
-
import torch
|
8 |
-
|
9 |
-
import edge_tts
|
10 |
-
import librosa
|
11 |
-
|
12 |
-
|
13 |
-
# https://github.com/fumiama/Retrieval-based-Voice-Conversion-WebUI/blob/main/config.py#L43-L55 # noqa
|
14 |
-
def has_mps() -> bool:
|
15 |
-
if sys.platform != "darwin":
|
16 |
-
return False
|
17 |
-
else:
|
18 |
-
if not getattr(torch, 'has_mps', False):
|
19 |
-
return False
|
20 |
-
|
21 |
-
try:
|
22 |
-
torch.zeros(1).to(torch.device("mps"))
|
23 |
-
return True
|
24 |
-
except Exception:
|
25 |
-
return False
|
26 |
-
|
27 |
-
|
28 |
-
def is_half(device: str) -> bool:
|
29 |
-
if not device.startswith('cuda'):
|
30 |
-
return False
|
31 |
-
else:
|
32 |
-
gpu_name = torch.cuda.get_device_name(
|
33 |
-
int(device.split(':')[-1])
|
34 |
-
).upper()
|
35 |
-
|
36 |
-
# ...regex?
|
37 |
-
if (
|
38 |
-
('16' in gpu_name and 'V100' not in gpu_name)
|
39 |
-
or 'P40' in gpu_name
|
40 |
-
or '1060' in gpu_name
|
41 |
-
or '1070' in gpu_name
|
42 |
-
or '1080' in gpu_name
|
43 |
-
):
|
44 |
-
return False
|
45 |
-
|
46 |
-
return True
|
47 |
-
|
48 |
-
|
49 |
-
def load_hubert_model(device: str, model_path: str = 'hubert_base.pt'):
|
50 |
-
model = checkpoint_utils.load_model_ensemble_and_task(
|
51 |
-
[model_path]
|
52 |
-
)[0][0].to(device)
|
53 |
-
|
54 |
-
if is_half(device):
|
55 |
-
return model.half()
|
56 |
-
else:
|
57 |
-
return model.float()
|
58 |
-
|
59 |
-
|
60 |
-
async def call_edge_tts(speaker_name: str, text: str):
|
61 |
-
tts_com = edge_tts.Communicate(text, speaker_name)
|
62 |
-
tts_raw = b''
|
63 |
-
|
64 |
-
# Stream TTS audio to bytes
|
65 |
-
async for chunk in tts_com.stream():
|
66 |
-
if chunk['type'] == 'audio':
|
67 |
-
tts_raw += chunk['data']
|
68 |
-
|
69 |
-
# Convert mp3 stream to wav
|
70 |
-
ffmpeg_proc = await asyncio.create_subprocess_exec(
|
71 |
-
'ffmpeg',
|
72 |
-
'-f', 'mp3',
|
73 |
-
'-i', '-',
|
74 |
-
'-f', 'wav',
|
75 |
-
'-',
|
76 |
-
stdin=asyncio.subprocess.PIPE,
|
77 |
-
stdout=asyncio.subprocess.PIPE
|
78 |
-
)
|
79 |
-
(tts_wav, _) = await ffmpeg_proc.communicate(tts_raw)
|
80 |
-
|
81 |
-
return librosa.load(BytesIO(tts_wav))
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Benson/text-generation/Examples/Descargar Apk Mod Hello Neighbor.md
DELETED
@@ -1,63 +0,0 @@
|
|
1 |
-
<br />
|
2 |
-
<h1>Descargar APK Mod Hola vecino: Cómo colarse en la casa de su vecino con recursos ilimitados</h1>
|
3 |
-
<p>¿Alguna vez te has preguntado qué esconde tu vecino en su sótano? ¿Tienes el coraje y las habilidades para colarte en su casa y averiguarlo? Si eres un fan de los juegos de terror sigilosos, es posible que hayas oído hablar de <strong>Hello Neighbor</strong>, un juego popular que te desafía a ser más astuto que una IA avanzada que aprende de cada movimiento. Pero ¿qué pasa si quieres tener más diversión y libertad en el juego? ¿Qué pasa si desea acceder a todos los niveles, secretos y artículos sin pasar horas o dinero? En este artículo, le mostraremos cómo <strong>download APK mod Hello Neighbor</strong>, una versión modificada del juego que le da recursos ilimitados y trucos. Pero antes de hacer eso, vamos a averiguar más sobre el juego en sí y lo que es un mod APK. </p>
|
4 |
-
<h2>descargar apk mod hello neighbor</h2><br /><p><b><b>Download</b> ---> <a href="https://bltlly.com/2v6L2L">https://bltlly.com/2v6L2L</a></b></p><br /><br />
|
5 |
-
<h2>¿Qué es Hello Neighbor? </h2>
|
6 |
-
<p>Hello Neighbor es un juego de terror oculto que fue lanzado en 2017 por Dynamic Pixels y tinyBuild. El juego está disponible para Windows, Xbox One, PlayStation 4, Nintendo Switch, iOS, Android y Stadia. El juego también ha generado varios spin-offs, como Secret Neighbor, Hello Neighbor Hide and Seek, Hello Engineer y Hello Guest.</p>
|
7 |
-
<h3>Un juego de terror sigiloso con una IA avanzada</h3>
|
8 |
-
<p>La premisa principal de Hello Neighbor es que eres un chico curioso que quiere colarse en la casa de tu vecino y descubrir lo que está escondiendo en su sótano. Sin embargo, su vecino no es una persona amigable o normal. Es un hombre misterioso y espeluznante que hará cualquier cosa para evitar que entre en su casa. Él pondrá trampas, te perseguirá, e incluso aprenderá de tus acciones. El juego cuenta con una IA avanzada que se adapta a tu comportamiento y crea nuevos obstáculos y desafíos para ti. Tendrás que usar sigilo, estrategia y creatividad para evitar la detección y alcanzar tu objetivo. </p>
|
9 |
-
<h3>Un juego estilo sandbox con interacción ambiental y física</h3>
|
10 |
-
|
11 |
-
<h3>Una serie de juegos y spin-offs establecidos en el universo Hello Neighbor</h3>
|
12 |
-
<p>Hola vecino no es solo un juego. Se trata de una serie de juegos y spin-offs que amplían el universo de Hello Neighbor y ofrecen diferentes perspectivas y experiencias. Algunos de los juegos y spin-offs son: - <strong>Secret Neighbor</strong>: Un juego multijugador de terror social que enfrenta a un grupo de niños contra uno de ellos que es secretamente el vecino disfrazado. El juego se desarrolla entre el Acto 1 y el Acto 2 de Hello Neighbor. - <strong>Hello Neighbor Hide and Seek</strong>: Una precuela de Hello Neighbor que revela la trágica historia de fondo del vecino y su familia. El juego es un juego de sigilo que simula un juego de escondidas entre los hijos del vecino. - <strong>Hello Engineer</strong>: Un juego spin-off que se centra en la construcción y elaboración en lugar de sigilo y horror. El juego se desarrolla en un parque de atracciones abandonado donde tienes que usar materiales de desecho y herramientas para crear máquinas y vehículos. - <strong>Hello Guest</strong>: Una secuela de Hello Neighbor que sigue a un nuevo protagonista que trabaja como guardia nocturno en el mismo parque de diversiones. El juego es un juego de terror sigiloso que introduce un nuevo enemigo, el Invitado, que te acosa y acosa. <h2>¿Qué es APK Mod? </h2>
|
13 |
-
<p>APK Mod es un término que se refiere a una versión modificada de una aplicación Android. APK significa Android Package Kit, que es el formato de archivo utilizado por los dispositivos Android para instalar y distribuir aplicaciones. Mod significa modificación, lo que significa que el archivo APK original ha sido alterado o hackeado para cambiar algunos aspectos de la aplicación. </p>
|
14 |
-
<h3>Una versión modificada de una aplicación Android</h3>
|
15 |
-
|
16 |
-
<h3>Una forma de acceder a funciones premium, recursos ilimitados o trucos</h3>
|
17 |
-
<p>Una de las principales razones por las que la gente descarga mods APK es acceder a características premium, recursos ilimitados, o trucos que no están disponibles en la versión original de la aplicación. Por ejemplo, pueden desbloquear todos los niveles, personajes, objetos, armas, etc. También pueden obtener monedas, gemas, vidas, salud, etc. También pueden usar trucos como invencibilidad, hackeo de velocidad, teletransportación, etc.</p>
|
18 |
-
<h3>Un riesgo potencial de malware, virus o problemas legales</h3>
|
19 |
-
<p>Sin embargo, la descarga de mods APK no está exenta de riesgos. Hay muchas fuentes de archivos APK modificados en Internet, pero no todos ellos son confiables o seguros. Algunos de ellos pueden contener malware, virus, spyware u otro software dañino que puede dañar su dispositivo o comprometer sus datos. Algunos de ellos también pueden violar los términos de servicio o los derechos de propiedad intelectual de los desarrolladores originales o editores de la aplicación. Esto puede resultar en problemas legales o prohibiciones de usar la aplicación. </p>
|
20 |
-
<p></p>
|
21 |
-
<h2>Cómo descargar APK mod hola vecino? </h2>
|
22 |
-
<p>Si desea descargar APK mod Hello Neighbor, tendrá que seguir estos pasos:</p>
|
23 |
-
<h3>Encontrar una fuente confiable de archivos APK modded</h3>
|
24 |
-
<p>El primer paso es encontrar una fuente confiable de archivos APK modded que ofrecen mod APK Hello Neighbor. Puede buscar en línea para sitios web o foros que proporcionan enlaces o descargas de archivos APK modded. Sin embargo, debe tener cuidado y hacer algunas investigaciones antes de descargar nada de fuentes desconocidas. Usted debe comprobar las revisiones, calificaciones, comentarios y comentarios de otros usuarios que han descargado el mismo archivo. También debe escanear el archivo con un software antivirus o anti-malware antes de instalarlo. </p>
|
25 |
-
<h3>Habilitar fuentes desconocidas en la configuración del dispositivo</h3>
|
26 |
-
|
27 |
-
<h3>Instalar el archivo APK y disfrutar del juego</h3>
|
28 |
-
<p>El tercer paso es instalar el archivo APK y disfrutar del juego. Tendrá que localizar el archivo APK descargado en el almacenamiento del dispositivo y toque en él para iniciar el proceso de instalación. Es posible que tenga que seguir algunas instrucciones o aceptar algunos términos y condiciones antes de completar la instalación. Una vez realizada la instalación, puedes iniciar el juego y disfrutar jugando con recursos y trucos ilimitados. </p>
|
29 |
-
<h2>¿Cuáles son los beneficios de descargar APK Mod Hello Neighbor? </h2>
|
30 |
-
|
31 |
-
<p>Sin embargo, descargar APK mod Hello Neighbor también puede tener algunos inconvenientes para algunos jugadores que quieren disfrutar de la experiencia original y auténtica del juego. Algunos de los inconvenientes son: - <strong>Riesgo de dañar su dispositivo o comprometer sus datos</strong>: Como se mencionó anteriormente, descargar APK mod Hello Neighbor de fuentes desconocidas puede exponer su dispositivo o datos a malware, virus, spyware u otro software dañino. Estos pueden dañar su dispositivo o comprometer sus datos al eliminarlos, robarlos o cifrarlos. También pueden causar que el dispositivo falle, se bloquee o se sobrecaliente. También pueden acceder a su información personal, como sus contactos, mensajes, fotos, etc., y usarlas con fines maliciosos. - <strong>Riesgo de violación de los términos de servicio o los derechos de propiedad intelectual de los desarrolladores</strong>: Descarga APK mod Hello Neighbor también puede violar los términos de servicio o los derechos de propiedad intelectual de los desarrolladores originales o editores del juego. Estas son las reglas y regulaciones que aceptas cuando descargas o juegas el juego desde la fuente oficial. Al descargar APK mod Hola Vecino, usted está rompiendo estas reglas y reglamentos y falta de respeto a los creadores y propietarios del juego. Esto puede resultar en problemas legales o prohibiciones de usar el juego u otros servicios de los desarrolladores o editores. - <strong>Riesgo de perder el encanto original y el desafío del juego</strong>: Descarga mod APK Hola vecino también puede perder el encanto original y el desafío del juego. El juego está diseñado para ser un juego de terror sigiloso que pone a prueba tus habilidades y nervios contra una IA avanzada que aprende de tus acciones. El juego también está diseñado para ser un juego al estilo sandbox que te anima a experimentar y ser creativo con tu enfoque. 
Al descargar APK mod Hello Neighbor, estás cambiando estos aspectos del juego y haciéndolo más fácil y menos inmersivo. Usted también está perdiendo la satisfacción y la recompensa de completar
|
32 |
-
|
33 |
-
<p>En conclusión, descargar APK mod Hello Neighbor es una elección personal que depende de qué tipo de jugador eres y qué tipo de experiencia quieres tener en el juego. Si quieres tener más diversión y libertad en el juego, se puede descargar APK mod Hello Neighbor y disfrutar de jugar con recursos ilimitados y trucos. Sin embargo, debe ser consciente de los riesgos y desventajas de hacerlo, como dañar su dispositivo o datos, violar los términos de servicio o los derechos de propiedad intelectual de los desarrolladores, o perder el encanto original y el desafío del juego. Si quieres disfrutar del juego como está destinado a ser jugado, puedes descargar Hello Neighbor desde la fuente oficial y respetar a los creadores y otros jugadores. La elección es tuya, pero lo que elijas, diviértete y sé seguro. </p>
|
34 |
-
<h3>Preguntas frecuentes</h3>
|
35 |
-
<p>Aquí hay algunas preguntas frecuentes sobre la descarga de APK mod Hola vecino:</p>
|
36 |
-
<tabla>
|
37 |
-
<tr>
|
38 |
-
<th>Pregunta</th>
|
39 |
-
<th>Respuesta</th>
|
40 |
-
</tr>
|
41 |
-
<tr>
|
42 |
-
<td>¿Dónde puedo descargar APK mod Hello Neighbor? </td>
|
43 |
-
<td>Puedes buscar en línea sitios web o foros que proporcionan enlaces o descargas para archivos APK modificados. Sin embargo, debe tener cuidado y hacer algunas investigaciones antes de descargar cualquier cosa de fuentes desconocidas. </td>
|
44 |
-
</tr>
|
45 |
-
<tr>
|
46 |
-
<td>¿Cómo puedo instalar APK mod Hello Neighbor? </td>
|
47 |
-
<td>Tendrá que habilitar fuentes desconocidas en la configuración del dispositivo, localizar el archivo APK descargado en el almacenamiento del dispositivo, y toque en él para iniciar el proceso de instalación. </td>
|
48 |
-
</tr>
|
49 |
-
<tr>
|
50 |
-
<td>¿Cuáles son algunas características de APK mod Hello Neighbor? </td>
|
51 |
-
<td>Algunas características de APK mod Hola Vecino están desbloqueando todos los niveles y secretos, conseguir artículos ilimitados, monedas, y la salud, y la personalización de su personaje y el juego. </td>
|
52 |
-
</tr>
|
53 |
-
<tr>
|
54 |
-
<td>¿Cuáles son algunos de los riesgos de descargar APK mod Hello Neighbor? </td>
|
55 |
-
|
56 |
-
</tr>
|
57 |
-
<tr>
|
58 |
-
<td>¿Es legal descargar APK mod Hello Neighbor? </td>
|
59 |
-
<td>Descargar APK mod Hello Neighbor puede no ser legal en algunos países o regiones, ya que puede violar los términos de servicio o los derechos de propiedad intelectual de los desarrolladores originales o editores del juego. Usted debe comprobar las leyes y regulaciones en su área antes de descargar nada de fuentes desconocidas. </td>
|
60 |
-
</tr>
|
61 |
-
</tabla></p> 64aa2da5cf<br />
|
62 |
-
<br />
|
63 |
-
<br />
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/CVPR/LIVE/thrust/thrust/iterator/transform_iterator.h
DELETED
@@ -1,356 +0,0 @@
|
|
1 |
-
/*
|
2 |
-
* Copyright 2008-2013 NVIDIA Corporation
|
3 |
-
*
|
4 |
-
* Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
-
* you may not use this file except in compliance with the License.
|
6 |
-
* You may obtain a copy of the License at
|
7 |
-
*
|
8 |
-
* http://www.apache.org/licenses/LICENSE-2.0
|
9 |
-
*
|
10 |
-
* Unless required by applicable law or agreed to in writing, software
|
11 |
-
* distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
-
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
-
* See the License for the specific language governing permissions and
|
14 |
-
* limitations under the License.
|
15 |
-
*/
|
16 |
-
|
17 |
-
|
18 |
-
/*! \file thrust/iterator/transform_iterator.h
|
19 |
-
* \brief An iterator which adapts another iterator by applying a function to the result of its dereference
|
20 |
-
*/
|
21 |
-
|
22 |
-
/*
|
23 |
-
* (C) Copyright David Abrahams 2002.
|
24 |
-
* (C) Copyright Jeremy Siek 2002.
|
25 |
-
* (C) Copyright Thomas Witt 2002.
|
26 |
-
*
|
27 |
-
* Distributed under the Boost Software License, Version 1.0.
|
28 |
-
* (See accompanying NOTICE file for the complete license)
|
29 |
-
*
|
30 |
-
* For more information, see http://www.boost.org
|
31 |
-
*/
|
32 |
-
|
33 |
-
#pragma once
|
34 |
-
|
35 |
-
#include <thrust/detail/config.h>
|
36 |
-
|
37 |
-
// #include the details first
|
38 |
-
#include <thrust/iterator/detail/transform_iterator.inl>
|
39 |
-
#include <thrust/iterator/iterator_facade.h>
|
40 |
-
#include <thrust/iterator/iterator_traits.h>
|
41 |
-
#include <thrust/detail/type_traits.h>
|
42 |
-
|
43 |
-
namespace thrust
|
44 |
-
{
|
45 |
-
|
46 |
-
/*! \addtogroup iterators
|
47 |
-
* \{
|
48 |
-
*/
|
49 |
-
|
50 |
-
/*! \addtogroup fancyiterator Fancy Iterators
|
51 |
-
* \ingroup iterators
|
52 |
-
* \{
|
53 |
-
*/
|
54 |
-
|
55 |
-
/*! \p transform_iterator is an iterator which represents a pointer into a range
|
56 |
-
* of values after transformation by a function. This iterator is useful for
|
57 |
-
* creating a range filled with the result of applying an operation to another range
|
58 |
-
* without either explicitly storing it in memory, or explicitly executing the transformation.
|
59 |
-
* Using \p transform_iterator facilitates kernel fusion by deferring the execution
|
60 |
-
* of a transformation until the value is needed while saving both memory capacity
|
61 |
-
* and bandwidth.
|
62 |
-
*
|
63 |
-
* The following code snippet demonstrates how to create a \p transform_iterator
|
64 |
-
* which represents the result of \c sqrtf applied to the contents of a \p device_vector.
|
65 |
-
*
|
66 |
-
* \code
|
67 |
-
* #include <thrust/iterator/transform_iterator.h>
|
68 |
-
* #include <thrust/device_vector.h>
|
69 |
-
*
|
70 |
-
* // note: functor inherits from unary_function
|
71 |
-
* struct square_root : public thrust::unary_function<float,float>
|
72 |
-
* {
|
73 |
-
* __host__ __device__
|
74 |
-
* float operator()(float x) const
|
75 |
-
* {
|
76 |
-
* return sqrtf(x);
|
77 |
-
* }
|
78 |
-
* };
|
79 |
-
*
|
80 |
-
* int main()
|
81 |
-
* {
|
82 |
-
* thrust::device_vector<float> v(4);
|
83 |
-
* v[0] = 1.0f;
|
84 |
-
* v[1] = 4.0f;
|
85 |
-
* v[2] = 9.0f;
|
86 |
-
* v[3] = 16.0f;
|
87 |
-
*
|
88 |
-
* typedef thrust::device_vector<float>::iterator FloatIterator;
|
89 |
-
*
|
90 |
-
* thrust::transform_iterator<square_root, FloatIterator> iter(v.begin(), square_root());
|
91 |
-
*
|
92 |
-
* *iter; // returns 1.0f
|
93 |
-
* iter[0]; // returns 1.0f;
|
94 |
-
* iter[1]; // returns 2.0f;
|
95 |
-
* iter[2]; // returns 3.0f;
|
96 |
-
* iter[3]; // returns 4.0f;
|
97 |
-
*
|
98 |
-
* // iter[4] is an out-of-bounds error
|
99 |
-
* }
|
100 |
-
* \endcode
|
101 |
-
*
|
102 |
-
* This next example demonstrates how to use a \p transform_iterator with the
|
103 |
-
* \p thrust::reduce function to compute the sum of squares of a sequence.
|
104 |
-
* We will create temporary \p transform_iterators with the
|
105 |
-
* \p make_transform_iterator function in order to avoid explicitly specifying their type:
|
106 |
-
*
|
107 |
-
* \code
|
108 |
-
* #include <thrust/iterator/transform_iterator.h>
|
109 |
-
* #include <thrust/device_vector.h>
|
110 |
-
* #include <thrust/reduce.h>
|
111 |
-
* #include <iostream>
|
112 |
-
*
|
113 |
-
* // note: functor inherits from unary_function
|
114 |
-
* struct square : public thrust::unary_function<float,float>
|
115 |
-
* {
|
116 |
-
* __host__ __device__
|
117 |
-
* float operator()(float x) const
|
118 |
-
* {
|
119 |
-
* return x * x;
|
120 |
-
* }
|
121 |
-
* };
|
122 |
-
*
|
123 |
-
* int main()
|
124 |
-
* {
|
125 |
-
* // initialize a device array
|
126 |
-
* thrust::device_vector<float> v(4);
|
127 |
-
* v[0] = 1.0f;
|
128 |
-
* v[1] = 2.0f;
|
129 |
-
* v[2] = 3.0f;
|
130 |
-
* v[3] = 4.0f;
|
131 |
-
*
|
132 |
-
* float sum_of_squares =
|
133 |
-
* thrust::reduce(thrust::make_transform_iterator(v.begin(), square()),
|
134 |
-
* thrust::make_transform_iterator(v.end(), square()));
|
135 |
-
*
|
136 |
-
* std::cout << "sum of squares: " << sum_of_squares << std::endl;
|
137 |
-
* return 0;
|
138 |
-
* }
|
139 |
-
* \endcode
|
140 |
-
*
|
141 |
-
* Note that in the previous two examples the transform functor (namely \c square_root
|
142 |
-
* and \c square) inherits from \c thrust::unary_function. Inheriting from
|
143 |
-
* \c thrust::unary_function ensures that a functor is a valid \c AdaptableUnaryFunction
|
144 |
-
* and provides all the necessary \c typedef declarations. The \p transform_iterator
|
145 |
-
* can also be applied to a \c UnaryFunction that does not inherit from
|
146 |
-
* \c thrust::unary_function using an optional template argument. The following example
|
147 |
-
* illustrates how to use the third template argument to specify the \c result_type of
|
148 |
-
* the function.
|
149 |
-
*
|
150 |
-
* \code
|
151 |
-
* #include <thrust/iterator/transform_iterator.h>
|
152 |
-
* #include <thrust/device_vector.h>
|
153 |
-
*
|
154 |
-
* // note: functor *does not* inherit from unary_function
|
155 |
-
* struct square_root
|
156 |
-
* {
|
157 |
-
* __host__ __device__
|
158 |
-
* float operator()(float x) const
|
159 |
-
* {
|
160 |
-
* return sqrtf(x);
|
161 |
-
* }
|
162 |
-
* };
|
163 |
-
*
|
164 |
-
* int main()
|
165 |
-
* {
|
166 |
-
* thrust::device_vector<float> v(4);
|
167 |
-
* v[0] = 1.0f;
|
168 |
-
* v[1] = 4.0f;
|
169 |
-
* v[2] = 9.0f;
|
170 |
-
* v[3] = 16.0f;
|
171 |
-
*
|
172 |
-
* typedef thrust::device_vector<float>::iterator FloatIterator;
|
173 |
-
*
|
174 |
-
* // note: float result_type is specified explicitly
|
175 |
-
* thrust::transform_iterator<square_root, FloatIterator, float> iter(v.begin(), square_root());
|
176 |
-
*
|
177 |
-
* *iter; // returns 1.0f
|
178 |
-
* iter[0]; // returns 1.0f;
|
179 |
-
* iter[1]; // returns 2.0f;
|
180 |
-
* iter[2]; // returns 3.0f;
|
181 |
-
* iter[3]; // returns 4.0f;
|
182 |
-
*
|
183 |
-
* // iter[4] is an out-of-bounds error
|
184 |
-
* }
|
185 |
-
* \endcode
|
186 |
-
*
|
187 |
-
* \see make_transform_iterator
|
188 |
-
*/
|
189 |
-
template <class AdaptableUnaryFunction, class Iterator, class Reference = use_default, class Value = use_default>
|
190 |
-
class transform_iterator
|
191 |
-
: public detail::transform_iterator_base<AdaptableUnaryFunction, Iterator, Reference, Value>::type
|
192 |
-
{
|
193 |
-
/*! \cond
|
194 |
-
*/
|
195 |
-
public:
|
196 |
-
typedef typename
|
197 |
-
detail::transform_iterator_base<AdaptableUnaryFunction, Iterator, Reference, Value>::type
|
198 |
-
super_t;
|
199 |
-
|
200 |
-
friend class thrust::iterator_core_access;
|
201 |
-
/*! \endcond
|
202 |
-
*/
|
203 |
-
|
204 |
-
public:
|
205 |
-
/*! Null constructor does nothing.
|
206 |
-
*/
|
207 |
-
__host__ __device__
|
208 |
-
transform_iterator() {}
|
209 |
-
|
210 |
-
#if THRUST_CPP_DIALECT >= 2011
|
211 |
-
transform_iterator(transform_iterator const&) = default;
|
212 |
-
#endif
|
213 |
-
|
214 |
-
/*! This constructor takes as arguments an \c Iterator and an \c AdaptableUnaryFunction
|
215 |
-
* and copies them to a new \p transform_iterator.
|
216 |
-
*
|
217 |
-
* \param x An \c Iterator pointing to the input to this \p transform_iterator's \c AdaptableUnaryFunction.
|
218 |
-
* \param f An \c AdaptableUnaryFunction used to transform the objects pointed to by \p x.
|
219 |
-
*/
|
220 |
-
__host__ __device__
|
221 |
-
transform_iterator(Iterator const& x, AdaptableUnaryFunction f)
|
222 |
-
: super_t(x), m_f(f) {
|
223 |
-
}
|
224 |
-
|
225 |
-
/*! This explicit constructor copies the value of a given \c Iterator and creates
|
226 |
-
* this \p transform_iterator's \c AdaptableUnaryFunction using its null constructor.
|
227 |
-
*
|
228 |
-
* \param x An \c Iterator to copy.
|
229 |
-
*/
|
230 |
-
__host__ __device__
|
231 |
-
explicit transform_iterator(Iterator const& x)
|
232 |
-
: super_t(x) { }
|
233 |
-
|
234 |
-
/*! This copy constructor creates a new \p transform_iterator from another
|
235 |
-
* \p transform_iterator.
|
236 |
-
*
|
237 |
-
* \param other The \p transform_iterator to copy.
|
238 |
-
*/
|
239 |
-
template<typename OtherAdaptableUnaryFunction,
|
240 |
-
typename OtherIterator,
|
241 |
-
typename OtherReference,
|
242 |
-
typename OtherValue>
|
243 |
-
__host__ __device__
|
244 |
-
transform_iterator(const transform_iterator<OtherAdaptableUnaryFunction, OtherIterator, OtherReference, OtherValue> &other,
|
245 |
-
typename thrust::detail::enable_if_convertible<OtherIterator, Iterator>::type* = 0,
|
246 |
-
typename thrust::detail::enable_if_convertible<OtherAdaptableUnaryFunction, AdaptableUnaryFunction>::type* = 0)
|
247 |
-
: super_t(other.base()), m_f(other.functor()) {}
|
248 |
-
|
249 |
-
/*! Copy assignment operator copies from another \p transform_iterator.
|
250 |
-
* \p other The other \p transform_iterator to copy
|
251 |
-
* \return <tt>*this</tt>
|
252 |
-
*
|
253 |
-
* \note If the type of this \p transform_iterator's functor is not copy assignable
|
254 |
-
* (for example, if it is a lambda) it is not an error to call this function.
|
255 |
-
* In this case, however, the functor will not be modified.
|
256 |
-
*
|
257 |
-
* In any case, this \p transform_iterator's underlying iterator will be copy assigned.
|
258 |
-
*/
|
259 |
-
__host__ __device__
|
260 |
-
transform_iterator &operator=(const transform_iterator &other)
|
261 |
-
{
|
262 |
-
return do_assign(other,
|
263 |
-
// XXX gcc 4.2.1 crashes on is_copy_assignable; just assume the functor is assignable as a WAR
|
264 |
-
#if (THRUST_HOST_COMPILER == THRUST_HOST_COMPILER_GCC) && (THRUST_GCC_VERSION <= 40201)
|
265 |
-
thrust::detail::true_type()
|
266 |
-
#else
|
267 |
-
typename thrust::detail::is_copy_assignable<AdaptableUnaryFunction>::type()
|
268 |
-
#endif // THRUST_HOST_COMPILER
|
269 |
-
);
|
270 |
-
}
|
271 |
-
|
272 |
-
/*! This method returns a copy of this \p transform_iterator's \c AdaptableUnaryFunction.
|
273 |
-
* \return A copy of this \p transform_iterator's \c AdaptableUnaryFunction.
|
274 |
-
*/
|
275 |
-
__host__ __device__
|
276 |
-
AdaptableUnaryFunction functor() const
|
277 |
-
{ return m_f; }
|
278 |
-
|
279 |
-
/*! \cond
|
280 |
-
*/
|
281 |
-
private:
|
282 |
-
__host__ __device__
|
283 |
-
transform_iterator &do_assign(const transform_iterator &other, thrust::detail::true_type)
|
284 |
-
{
|
285 |
-
super_t::operator=(other);
|
286 |
-
|
287 |
-
// do assign to m_f
|
288 |
-
m_f = other.functor();
|
289 |
-
|
290 |
-
return *this;
|
291 |
-
}
|
292 |
-
|
293 |
-
__host__ __device__
|
294 |
-
transform_iterator &do_assign(const transform_iterator &other, thrust::detail::false_type)
|
295 |
-
{
|
296 |
-
super_t::operator=(other);
|
297 |
-
|
298 |
-
// don't assign to m_f
|
299 |
-
|
300 |
-
return *this;
|
301 |
-
}
|
302 |
-
|
303 |
-
// MSVC 2013 and 2015 incorrectly warning about returning a reference to
|
304 |
-
// a local/temporary here.
|
305 |
-
// See goo.gl/LELTNp
|
306 |
-
THRUST_DISABLE_MSVC_WARNING_BEGIN(4172)
|
307 |
-
|
308 |
-
__thrust_exec_check_disable__
|
309 |
-
__host__ __device__
|
310 |
-
typename super_t::reference dereference() const
|
311 |
-
{
|
312 |
-
// Create a temporary to allow iterators with wrapped references to
|
313 |
-
// convert to their value type before calling m_f. Note that this
|
314 |
-
// disallows non-constant operations through m_f.
|
315 |
-
typename thrust::iterator_value<Iterator>::type x = *this->base();
|
316 |
-
return m_f(x);
|
317 |
-
}
|
318 |
-
|
319 |
-
THRUST_DISABLE_MSVC_WARNING_END(4172)
|
320 |
-
|
321 |
-
// tag this as mutable per Dave Abrahams in this thread:
|
322 |
-
// http://lists.boost.org/Archives/boost/2004/05/65332.php
|
323 |
-
mutable AdaptableUnaryFunction m_f;
|
324 |
-
|
325 |
-
/*! \endcond
|
326 |
-
*/
|
327 |
-
}; // end transform_iterator
|
328 |
-
|
329 |
-
|
330 |
-
/*! \p make_transform_iterator creates a \p transform_iterator
|
331 |
-
* from an \c Iterator and \c AdaptableUnaryFunction.
|
332 |
-
*
|
333 |
-
* \param it The \c Iterator pointing to the input range of the
|
334 |
-
* newly created \p transform_iterator.
|
335 |
-
* \param fun The \c AdaptableUnaryFunction used to transform the range pointed
|
336 |
-
* to by \p it in the newly created \p transform_iterator.
|
337 |
-
* \return A new \p transform_iterator which transforms the range at
|
338 |
-
* \p it by \p fun.
|
339 |
-
* \see transform_iterator
|
340 |
-
*/
|
341 |
-
template <class AdaptableUnaryFunction, class Iterator>
|
342 |
-
inline __host__ __device__
|
343 |
-
transform_iterator<AdaptableUnaryFunction, Iterator>
|
344 |
-
make_transform_iterator(Iterator it, AdaptableUnaryFunction fun)
|
345 |
-
{
|
346 |
-
return transform_iterator<AdaptableUnaryFunction, Iterator>(it, fun);
|
347 |
-
} // end make_transform_iterator
|
348 |
-
|
349 |
-
/*! \} // end fancyiterators
|
350 |
-
*/
|
351 |
-
|
352 |
-
/*! \} // end iterators
|
353 |
-
*/
|
354 |
-
|
355 |
-
} // end thrust
|
356 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/CVPR/LIVE/thrust/thrust/system/tbb/detail/transform_scan.h
DELETED
@@ -1,23 +0,0 @@
|
|
1 |
-
/*
|
2 |
-
* Copyright 2008-2013 NVIDIA Corporation
|
3 |
-
*
|
4 |
-
* Licensed under the Apache License, Version 2.0 (the "License");
|
5 |
-
* you may not use this file except in compliance with the License.
|
6 |
-
* You may obtain a copy of the License at
|
7 |
-
*
|
8 |
-
* http://www.apache.org/licenses/LICENSE-2.0
|
9 |
-
*
|
10 |
-
* Unless required by applicable law or agreed to in writing, software
|
11 |
-
* distributed under the License is distributed on an "AS IS" BASIS,
|
12 |
-
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
13 |
-
* See the License for the specific language governing permissions and
|
14 |
-
* limitations under the License.
|
15 |
-
*/
|
16 |
-
|
17 |
-
#pragma once
|
18 |
-
|
19 |
-
#include <thrust/detail/config.h>
|
20 |
-
|
21 |
-
// this system inherits transform_scan
|
22 |
-
#include <thrust/system/cpp/detail/transform_scan.h>
|
23 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/CVPR/WALT/mmdet/models/backbones/resnest.py
DELETED
@@ -1,317 +0,0 @@
|
|
1 |
-
import math
|
2 |
-
|
3 |
-
import torch
|
4 |
-
import torch.nn as nn
|
5 |
-
import torch.nn.functional as F
|
6 |
-
import torch.utils.checkpoint as cp
|
7 |
-
from mmcv.cnn import build_conv_layer, build_norm_layer
|
8 |
-
|
9 |
-
from ..builder import BACKBONES
|
10 |
-
from ..utils import ResLayer
|
11 |
-
from .resnet import Bottleneck as _Bottleneck
|
12 |
-
from .resnet import ResNetV1d
|
13 |
-
|
14 |
-
|
15 |
-
class RSoftmax(nn.Module):
|
16 |
-
"""Radix Softmax module in ``SplitAttentionConv2d``.
|
17 |
-
|
18 |
-
Args:
|
19 |
-
radix (int): Radix of input.
|
20 |
-
groups (int): Groups of input.
|
21 |
-
"""
|
22 |
-
|
23 |
-
def __init__(self, radix, groups):
|
24 |
-
super().__init__()
|
25 |
-
self.radix = radix
|
26 |
-
self.groups = groups
|
27 |
-
|
28 |
-
def forward(self, x):
|
29 |
-
batch = x.size(0)
|
30 |
-
if self.radix > 1:
|
31 |
-
x = x.view(batch, self.groups, self.radix, -1).transpose(1, 2)
|
32 |
-
x = F.softmax(x, dim=1)
|
33 |
-
x = x.reshape(batch, -1)
|
34 |
-
else:
|
35 |
-
x = torch.sigmoid(x)
|
36 |
-
return x
|
37 |
-
|
38 |
-
|
39 |
-
class SplitAttentionConv2d(nn.Module):
|
40 |
-
"""Split-Attention Conv2d in ResNeSt.
|
41 |
-
|
42 |
-
Args:
|
43 |
-
in_channels (int): Number of channels in the input feature map.
|
44 |
-
channels (int): Number of intermediate channels.
|
45 |
-
kernel_size (int | tuple[int]): Size of the convolution kernel.
|
46 |
-
stride (int | tuple[int]): Stride of the convolution.
|
47 |
-
padding (int | tuple[int]): Zero-padding added to both sides of
|
48 |
-
dilation (int | tuple[int]): Spacing between kernel elements.
|
49 |
-
groups (int): Number of blocked connections from input channels to
|
50 |
-
output channels.
|
51 |
-
groups (int): Same as nn.Conv2d.
|
52 |
-
radix (int): Radix of SpltAtConv2d. Default: 2
|
53 |
-
reduction_factor (int): Reduction factor of inter_channels. Default: 4.
|
54 |
-
conv_cfg (dict): Config dict for convolution layer. Default: None,
|
55 |
-
which means using conv2d.
|
56 |
-
norm_cfg (dict): Config dict for normalization layer. Default: None.
|
57 |
-
dcn (dict): Config dict for DCN. Default: None.
|
58 |
-
"""
|
59 |
-
|
60 |
-
def __init__(self,
|
61 |
-
in_channels,
|
62 |
-
channels,
|
63 |
-
kernel_size,
|
64 |
-
stride=1,
|
65 |
-
padding=0,
|
66 |
-
dilation=1,
|
67 |
-
groups=1,
|
68 |
-
radix=2,
|
69 |
-
reduction_factor=4,
|
70 |
-
conv_cfg=None,
|
71 |
-
norm_cfg=dict(type='BN'),
|
72 |
-
dcn=None):
|
73 |
-
super(SplitAttentionConv2d, self).__init__()
|
74 |
-
inter_channels = max(in_channels * radix // reduction_factor, 32)
|
75 |
-
self.radix = radix
|
76 |
-
self.groups = groups
|
77 |
-
self.channels = channels
|
78 |
-
self.with_dcn = dcn is not None
|
79 |
-
self.dcn = dcn
|
80 |
-
fallback_on_stride = False
|
81 |
-
if self.with_dcn:
|
82 |
-
fallback_on_stride = self.dcn.pop('fallback_on_stride', False)
|
83 |
-
if self.with_dcn and not fallback_on_stride:
|
84 |
-
assert conv_cfg is None, 'conv_cfg must be None for DCN'
|
85 |
-
conv_cfg = dcn
|
86 |
-
self.conv = build_conv_layer(
|
87 |
-
conv_cfg,
|
88 |
-
in_channels,
|
89 |
-
channels * radix,
|
90 |
-
kernel_size,
|
91 |
-
stride=stride,
|
92 |
-
padding=padding,
|
93 |
-
dilation=dilation,
|
94 |
-
groups=groups * radix,
|
95 |
-
bias=False)
|
96 |
-
# To be consistent with original implementation, starting from 0
|
97 |
-
self.norm0_name, norm0 = build_norm_layer(
|
98 |
-
norm_cfg, channels * radix, postfix=0)
|
99 |
-
self.add_module(self.norm0_name, norm0)
|
100 |
-
self.relu = nn.ReLU(inplace=True)
|
101 |
-
self.fc1 = build_conv_layer(
|
102 |
-
None, channels, inter_channels, 1, groups=self.groups)
|
103 |
-
self.norm1_name, norm1 = build_norm_layer(
|
104 |
-
norm_cfg, inter_channels, postfix=1)
|
105 |
-
self.add_module(self.norm1_name, norm1)
|
106 |
-
self.fc2 = build_conv_layer(
|
107 |
-
None, inter_channels, channels * radix, 1, groups=self.groups)
|
108 |
-
self.rsoftmax = RSoftmax(radix, groups)
|
109 |
-
|
110 |
-
@property
|
111 |
-
def norm0(self):
|
112 |
-
"""nn.Module: the normalization layer named "norm0" """
|
113 |
-
return getattr(self, self.norm0_name)
|
114 |
-
|
115 |
-
@property
|
116 |
-
def norm1(self):
|
117 |
-
"""nn.Module: the normalization layer named "norm1" """
|
118 |
-
return getattr(self, self.norm1_name)
|
119 |
-
|
120 |
-
def forward(self, x):
|
121 |
-
x = self.conv(x)
|
122 |
-
x = self.norm0(x)
|
123 |
-
x = self.relu(x)
|
124 |
-
|
125 |
-
batch, rchannel = x.shape[:2]
|
126 |
-
batch = x.size(0)
|
127 |
-
if self.radix > 1:
|
128 |
-
splits = x.view(batch, self.radix, -1, *x.shape[2:])
|
129 |
-
gap = splits.sum(dim=1)
|
130 |
-
else:
|
131 |
-
gap = x
|
132 |
-
gap = F.adaptive_avg_pool2d(gap, 1)
|
133 |
-
gap = self.fc1(gap)
|
134 |
-
|
135 |
-
gap = self.norm1(gap)
|
136 |
-
gap = self.relu(gap)
|
137 |
-
|
138 |
-
atten = self.fc2(gap)
|
139 |
-
atten = self.rsoftmax(atten).view(batch, -1, 1, 1)
|
140 |
-
|
141 |
-
if self.radix > 1:
|
142 |
-
attens = atten.view(batch, self.radix, -1, *atten.shape[2:])
|
143 |
-
out = torch.sum(attens * splits, dim=1)
|
144 |
-
else:
|
145 |
-
out = atten * x
|
146 |
-
return out.contiguous()
|
147 |
-
|
148 |
-
|
149 |
-
class Bottleneck(_Bottleneck):
    """Bottleneck block for ResNeSt.

    Replaces the parent's ``conv2``/``norm2`` with a
    :class:`SplitAttentionConv2d`, optionally moving the stride into an
    average-pooling layer (``avd_layer``).

    Args:
        inplanes (int): Input planes of this block.
        planes (int): Middle planes of this block.
        groups (int): Groups of conv2.
        base_width (int): Base of width in terms of base channels. Default: 4.
        base_channels (int): Base of channels for calculating width.
            Default: 64.
        radix (int): Radix of SpltAtConv2d. Default: 2
        reduction_factor (int): Reduction factor of inter_channels in
            SplitAttentionConv2d. Default: 4.
        avg_down_stride (bool): Whether to use average pool for stride in
            Bottleneck. Default: True.
        kwargs (dict): Key word arguments for base class.
    """
    expansion = 4

    def __init__(self,
                 inplanes,
                 planes,
                 groups=1,
                 base_width=4,
                 base_channels=64,
                 radix=2,
                 reduction_factor=4,
                 avg_down_stride=True,
                 **kwargs):
        """Bottleneck block for ResNeSt."""
        super(Bottleneck, self).__init__(inplanes, planes, **kwargs)

        # ResNeXt-style width scaling when grouped convolution is in use.
        if groups == 1:
            width = self.planes
        else:
            width = math.floor(self.planes *
                               (base_width / base_channels)) * groups

        # Only substitute the stride with average pooling when conv2 would
        # actually stride (conv2_stride comes from the parent class).
        self.avg_down_stride = avg_down_stride and self.conv2_stride > 1

        self.norm1_name, norm1 = build_norm_layer(
            self.norm_cfg, width, postfix=1)
        self.norm3_name, norm3 = build_norm_layer(
            self.norm_cfg, self.planes * self.expansion, postfix=3)

        self.conv1 = build_conv_layer(
            self.conv_cfg,
            self.inplanes,
            width,
            kernel_size=1,
            stride=self.conv1_stride,
            bias=False)
        self.add_module(self.norm1_name, norm1)
        self.with_modulated_dcn = False
        # conv2 becomes a split-attention conv; when avg_down_stride is on
        # it runs at stride 1 and avd_layer (below) does the downsampling.
        self.conv2 = SplitAttentionConv2d(
            width,
            width,
            kernel_size=3,
            stride=1 if self.avg_down_stride else self.conv2_stride,
            padding=self.dilation,
            dilation=self.dilation,
            groups=groups,
            radix=radix,
            reduction_factor=reduction_factor,
            conv_cfg=self.conv_cfg,
            norm_cfg=self.norm_cfg,
            dcn=self.dcn)
        # SplitAttentionConv2d carries its own norms, so the inherited
        # norm2 module is unused — drop it.
        delattr(self, self.norm2_name)

        if self.avg_down_stride:
            self.avd_layer = nn.AvgPool2d(3, self.conv2_stride, padding=1)

        self.conv3 = build_conv_layer(
            self.conv_cfg,
            width,
            self.planes * self.expansion,
            kernel_size=1,
            bias=False)
        self.add_module(self.norm3_name, norm3)

    def forward(self, x):
        """Residual forward pass, optionally gradient-checkpointed."""

        def _inner_forward(x):
            identity = x

            out = self.conv1(x)
            out = self.norm1(out)
            out = self.relu(out)

            if self.with_plugins:
                out = self.forward_plugin(out, self.after_conv1_plugin_names)

            # conv2 already applies its own norm + activation internally.
            out = self.conv2(out)

            if self.avg_down_stride:
                out = self.avd_layer(out)

            if self.with_plugins:
                out = self.forward_plugin(out, self.after_conv2_plugin_names)

            out = self.conv3(out)
            out = self.norm3(out)

            if self.with_plugins:
                out = self.forward_plugin(out, self.after_conv3_plugin_names)

            if self.downsample is not None:
                identity = self.downsample(x)

            out += identity

            return out

        # Trade compute for memory with torch.utils.checkpoint when enabled.
        if self.with_cp and x.requires_grad:
            out = cp.checkpoint(_inner_forward, x)
        else:
            out = _inner_forward(x)

        out = self.relu(out)

        return out
|
270 |
-
|
271 |
-
|
272 |
-
@BACKBONES.register_module()
class ResNeSt(ResNetV1d):
    """ResNeSt backbone.

    A ResNetV1d variant whose bottlenecks use split-attention convolutions.

    Args:
        groups (int): Number of groups of Bottleneck. Default: 1
        base_width (int): Base width of Bottleneck. Default: 4
        radix (int): Radix of SplitAttentionConv2d. Default: 2
        reduction_factor (int): Reduction factor of inter_channels in
            SplitAttentionConv2d. Default: 4.
        avg_down_stride (bool): Whether to use average pool for stride in
            Bottleneck. Default: True.
        kwargs (dict): Keyword arguments for ResNet.
    """

    # depth -> (block class, blocks per stage)
    arch_settings = {
        50: (Bottleneck, (3, 4, 6, 3)),
        101: (Bottleneck, (3, 4, 23, 3)),
        152: (Bottleneck, (3, 8, 36, 3)),
        200: (Bottleneck, (3, 24, 36, 3))
    }

    def __init__(self,
                 groups=1,
                 base_width=4,
                 radix=2,
                 reduction_factor=4,
                 avg_down_stride=True,
                 **kwargs):
        # These attributes must be set *before* the parent constructor runs:
        # ResNet.__init__ builds the stages via self.make_res_layer(), which
        # reads them.
        self.groups = groups
        self.base_width = base_width
        self.radix = radix
        self.reduction_factor = reduction_factor
        self.avg_down_stride = avg_down_stride
        super(ResNeSt, self).__init__(**kwargs)

    def make_res_layer(self, **kwargs):
        """Pack all blocks in a stage into a ``ResLayer``."""
        return ResLayer(
            groups=self.groups,
            base_width=self.base_width,
            base_channels=self.base_channels,
            radix=self.radix,
            reduction_factor=self.reduction_factor,
            avg_down_stride=self.avg_down_stride,
            **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/CVPR/regionclip-demo/detectron2/checkpoint/clip_model_loading.py
DELETED
@@ -1,415 +0,0 @@
|
|
1 |
-
# Copyright (c) Facebook, Inc. and its affiliates.
|
2 |
-
import copy
|
3 |
-
import logging
|
4 |
-
import re
|
5 |
-
from typing import Dict, List
|
6 |
-
import torch
|
7 |
-
from tabulate import tabulate
|
8 |
-
|
9 |
-
|
10 |
-
def convert_basic_clip_names(original_keys, add_backbone_prefix=False, use_whole_clip=False, use_fpn_arch=False, regionclip=False):
    """
    Apply some basic name conversion to names in CLIP weights.
    It only deals with typical backbone models.

    Args:
        original_keys (list[str]):
    Returns:
        list[str]: The same number of strings matching those in original_keys.
        bool: whether the checkpoint is a ViT-based CLIP model.
    """
    renamed = copy.deepcopy(original_keys)

    # A ViT checkpoint is recognized by transformer blocks under the visual
    # tower; its names are kept untouched.
    is_vit = any('visual.transformer' in key for key in renamed)

    if not is_vit:  # resnet
        if add_backbone_prefix:  # CLIPRCNN or CLIPFastRCNN
            if use_whole_clip:  # CLIPRCNN
                target = "clip_backbone.visual."
            elif use_fpn_arch:  # CLIPFastRCNN with FPN
                target = "backbone.bottom_up."
            else:  # CLIPFastRCNN with C4
                target = "backbone."
        else:  # GeneralizedRCNN or ProposalNetwork
            target = ""
        renamed = [key.replace("visual.", target) for key in renamed]

    return renamed, is_vit
|
45 |
-
|
46 |
-
|
47 |
-
def convert_clip_names(weights, add_backbone_prefix=False, use_whole_clip=False, use_fpn_arch=False, regionclip=False):
    """
    Map CLIP Detectron weight names to Detectron2 names.

    Args:
        weights (dict): name -> tensor

    Returns:
        dict: detectron2 names -> tensor
        dict: detectron2 names -> C2 names
        bool: whether the checkpoint is a ViT-based CLIP model
    """
    logger = logging.getLogger(__name__)
    logger.info("Renaming CLIP weights ......")
    original_keys = sorted(weights.keys())
    layer_keys = copy.deepcopy(original_keys)

    # First apply the CLIP-specific visual-tower prefix conversion.
    layer_keys, use_vit = convert_basic_clip_names(layer_keys, add_backbone_prefix, use_whole_clip, use_fpn_arch, regionclip)

    # --------------------------------------------------------------------------
    # RPN hidden representation conv
    # --------------------------------------------------------------------------
    # FPN case
    # In the C2 model, the RPN hidden layer conv is defined for FPN level 2 and then
    # shared for all other levels, hence the appearance of "fpn2"
    layer_keys = [
        k.replace("conv.rpn.fpn2", "proposal_generator.rpn_head.conv") for k in layer_keys
    ]
    # Non-FPN case
    layer_keys = [k.replace("conv.rpn", "proposal_generator.rpn_head.conv") for k in layer_keys]

    # --------------------------------------------------------------------------
    # RPN box transformation conv
    # --------------------------------------------------------------------------
    # FPN case (see note above about "fpn2")
    layer_keys = [
        k.replace("rpn.bbox.pred.fpn2", "proposal_generator.rpn_head.anchor_deltas")
        for k in layer_keys
    ]
    layer_keys = [
        k.replace("rpn.cls.logits.fpn2", "proposal_generator.rpn_head.objectness_logits")
        for k in layer_keys
    ]
    # Non-FPN case
    layer_keys = [
        k.replace("rpn.bbox.pred", "proposal_generator.rpn_head.anchor_deltas") for k in layer_keys
    ]
    layer_keys = [
        k.replace("rpn.cls.logits", "proposal_generator.rpn_head.objectness_logits")
        for k in layer_keys
    ]

    # --------------------------------------------------------------------------
    # Fast R-CNN box head
    # --------------------------------------------------------------------------
    layer_keys = [re.sub("^bbox\\.pred", "bbox_pred", k) for k in layer_keys]
    layer_keys = [re.sub("^cls\\.score", "cls_score", k) for k in layer_keys]
    layer_keys = [re.sub("^fc6\\.", "box_head.fc1.", k) for k in layer_keys]
    layer_keys = [re.sub("^fc7\\.", "box_head.fc2.", k) for k in layer_keys]
    # 4conv1fc head tensor names: head_conv1_w, head_conv1_gn_s
    layer_keys = [re.sub("^head\\.conv", "box_head.conv", k) for k in layer_keys]

    # --------------------------------------------------------------------------
    # FPN lateral and output convolutions
    # --------------------------------------------------------------------------
    def fpn_map(name):
        """
        Look for keys with the following patterns:
        1) Starts with "fpn.inner."
           Example: "fpn.inner.res2.2.sum.lateral.weight"
           Meaning: These are lateral pathway convolutions
        2) Starts with "fpn.res"
           Example: "fpn.res2.2.sum.weight"
           Meaning: These are FPN output convolutions
        """
        splits = name.split(".")
        norm = ".norm" if "norm" in splits else ""
        if name.startswith("fpn.inner."):
            # splits example: ['fpn', 'inner', 'res2', '2', 'sum', 'lateral', 'weight']
            stage = int(splits[2][len("res") :])
            return "fpn_lateral{}{}.{}".format(stage, norm, splits[-1])
        elif name.startswith("fpn.res"):
            # splits example: ['fpn', 'res2', '2', 'sum', 'weight']
            stage = int(splits[1][len("res") :])
            return "fpn_output{}{}.{}".format(stage, norm, splits[-1])
        return name

    layer_keys = [fpn_map(k) for k in layer_keys]

    # --------------------------------------------------------------------------
    # Mask R-CNN mask head
    # --------------------------------------------------------------------------
    # roi_heads.StandardROIHeads case
    layer_keys = [k.replace(".[mask].fcn", "mask_head.mask_fcn") for k in layer_keys]
    layer_keys = [re.sub("^\\.mask\\.fcn", "mask_head.mask_fcn", k) for k in layer_keys]
    layer_keys = [k.replace("mask.fcn.logits", "mask_head.predictor") for k in layer_keys]
    # roi_heads.Res5ROIHeads case
    layer_keys = [k.replace("conv5.mask", "mask_head.deconv") for k in layer_keys]

    # --------------------------------------------------------------------------
    # Keypoint R-CNN head
    # --------------------------------------------------------------------------
    # interestingly, the keypoint head convs have blob names that are simply "conv_fcnX"
    layer_keys = [k.replace("conv.fcn", "roi_heads.keypoint_head.conv_fcn") for k in layer_keys]
    layer_keys = [
        k.replace("kps.score.lowres", "roi_heads.keypoint_head.score_lowres") for k in layer_keys
    ]
    layer_keys = [k.replace("kps.score.", "roi_heads.keypoint_head.score.") for k in layer_keys]

    # --------------------------------------------------------------------------
    # Done with replacements
    # --------------------------------------------------------------------------
    # The renaming must stay a bijection onto the original key list.
    assert len(set(layer_keys)) == len(layer_keys)
    assert len(original_keys) == len(layer_keys)

    new_weights = {}
    new_keys_to_original_keys = {}
    for orig, renamed in zip(original_keys, layer_keys):
        new_keys_to_original_keys[renamed] = orig
        if renamed.startswith("bbox_pred.") or renamed.startswith("mask_head.predictor."):
            # remove the meaningless prediction weight for background class
            new_start_idx = 4 if renamed.startswith("bbox_pred.") else 1
            new_weights[renamed] = weights[orig][new_start_idx:]
            logger.info(
                "Remove prediction weight for background class in {}. The shape changes from "
                "{} to {}.".format(
                    renamed, tuple(weights[orig].shape), tuple(new_weights[renamed].shape)
                )
            )
        elif renamed.startswith("cls_score."):
            # move weights of bg class from original index 0 to last index
            logger.info(
                "Move classification weights for background class in {} from index 0 to "
                "index {}.".format(renamed, weights[orig].shape[0] - 1)
            )
            new_weights[renamed] = torch.cat([weights[orig][1:], weights[orig][:1]])
        else:
            new_weights[renamed] = weights[orig]

    return new_weights, new_keys_to_original_keys, use_vit
|
186 |
-
|
187 |
-
|
188 |
-
# Note the current matching is not symmetric.
|
189 |
-
# it assumes model_state_dict will have longer names.
|
190 |
-
def align_and_update_state_dicts_for_CLIP(model_state_dict, ckpt_state_dict, c2_conversion=True, bb_rpn_weights=False, regionclip=False):
    """
    Extended from ./c2_model_loading.py
    Match names between the two state-dict, and returns a new chkpt_state_dict with names
    converted to match model_state_dict with heuristics. The returned dict can be later
    loaded with fvcore checkpointer.
    If `c2_conversion==True`, `ckpt_state_dict` is assumed to be a Caffe2
    model and will be renamed at first.

    Strategy: suppose that the models that we will create will have prefixes appended
    to each of its keys, for example due to an extra level of nesting that the original
    pre-trained weights from ImageNet won't contain. For example, model.state_dict()
    might return backbone[0].body.res2.conv1.weight, while the pre-trained model contains
    res2.conv1.weight. We thus want to match both parameters together.
    For that, we look for each model weight, look among all loaded keys if there is one
    that is a suffix of the current weight name, and use it if that's the case.
    If multiple matches exist, take the one with longest size
    of the corresponding name. For example, for the same model as before, the pretrained
    weight file can contain both res2.conv1.weight, as well as conv1.weight. In this case,
    we want to match backbone[0].body.conv1.weight to conv1.weight, and
    backbone[0].body.res2.conv1.weight to res2.conv1.weight.
    """
    model_keys = sorted(model_state_dict.keys())
    use_whole_clip = False  # whether use the whole clip (text & visual encoders), typically in CLIPRCNN meta arch
    add_backbone_prefix = False  # convert to 'backbone.' prefix, typically in CLIPFastRCNN meta arch
    use_fpn_arch = False  # if use FPN arch then convert to `bottom_up`, typically in CLIPFastRCNN meta arch with FPN backbone
    if bb_rpn_weights:  # a 2nd pretrained weights to load, for offline backbone & RPN, then convert the ckpt key names and only keep the ones we need
        new_ckpt_state_dict = {}
        for original_k in ckpt_state_dict:
            if 'backbone' in original_k:
                new_key = original_k.replace('backbone', 'offline_backbone')
                new_ckpt_state_dict[new_key] = ckpt_state_dict[original_k]
            if 'proposal_generator' in original_k:
                new_key = original_k.replace('proposal_generator', 'offline_proposal_generator')
                new_ckpt_state_dict[new_key] = ckpt_state_dict[original_k]
        new_ckpt_state_dict['ignore_others'] = torch.tensor([1])  # ignore other model weights (not 'offline_*') in batch_norm.py
        ckpt_state_dict = new_ckpt_state_dict
    else:  # the 1st pretrained weigths to load
        for model_key in model_keys:  # if use the whole clip, then convert ckpt 'visual.' names to 'clip_backbone.visual.'
            if 'clip_backbone' in model_key:
                use_whole_clip = True
        for model_key in model_keys:  # if there are backbone & offline_backbone, then convert the ckpt 'visual.' names to 'backbone.' to avoid ambiguity
            if 'offline_backbone' in model_key:
                add_backbone_prefix = True
            if 'fpn' in model_key:
                use_fpn_arch = True
    # original_keys: the name in the original dict (before renaming)
    ckpt_state_dict, original_keys, use_vit = convert_clip_names(ckpt_state_dict, add_backbone_prefix, use_whole_clip, use_fpn_arch, regionclip)
    ckpt_keys = sorted(ckpt_state_dict.keys())

    def match(a, b):
        # Matched ckpt_key should be a complete (starts with '.') suffix.
        # For example, roi_heads.mesh_head.whatever_conv1 does not match conv1,
        # but matches whatever_conv1 or mesh_head.whatever_conv1.
        return a == b or a.endswith("." + b)

    # get a matrix of string matches, where each (i, j) entry correspond to the size of the
    # ckpt_key string, if it matches
    match_matrix = [len(j) if match(i, j) else 0 for i in model_keys for j in ckpt_keys]
    match_matrix = torch.as_tensor(match_matrix).view(len(model_keys), len(ckpt_keys))
    # use the matched one with longest size in case of multiple matches
    max_match_size, idxs = match_matrix.max(1)
    # remove indices that correspond to no-match
    idxs[max_match_size == 0] = -1

    logger = logging.getLogger(__name__)
    # matched_pairs (matched checkpoint key --> matched model key)
    matched_keys = {}
    result_state_dict = {}
    for idx_model, idx_ckpt in enumerate(idxs.tolist()):
        if idx_ckpt == -1:
            continue
        key_model = model_keys[idx_model]
        key_ckpt = ckpt_keys[idx_ckpt]
        value_ckpt = ckpt_state_dict[key_ckpt]
        shape_in_model = model_state_dict[key_model].shape

        # Shape mismatches are skipped (with a warning) rather than fatal.
        if shape_in_model != value_ckpt.shape:
            logger.warning(
                "Shape of {} in checkpoint is {}, while shape of {} in model is {}.".format(
                    key_ckpt, value_ckpt.shape, key_model, shape_in_model
                )
            )
            logger.warning(
                "{} will not be loaded. Please double check and see if this is desired.".format(
                    key_ckpt
                )
            )
            continue

        assert key_model not in result_state_dict
        result_state_dict[key_model] = value_ckpt
        if key_ckpt in matched_keys:  # already added to matched_keys
            logger.error(
                "Ambiguity found for {} in checkpoint!"
                "It matches at least two keys in the model ({} and {}).".format(
                    key_ckpt, key_model, matched_keys[key_ckpt]
                )
            )
            raise ValueError("Cannot match one checkpoint key to multiple keys in the model.")

        matched_keys[key_ckpt] = key_model

    # logging:
    matched_model_keys = sorted(matched_keys.values())
    mmk_list = "The following model parameters are loaded from checkpoints:\n"
    for mmk in matched_model_keys:
        mmk_list += mmk + "\n"
    if len(matched_model_keys) == 0:
        logger.warning("No weights in checkpoint matched with model.")
        return ckpt_state_dict
    common_prefix = _longest_common_prefix(matched_model_keys)
    rev_matched_keys = {v: k for k, v in matched_keys.items()}
    original_keys = {k: original_keys[rev_matched_keys[k]] for k in matched_model_keys}

    # Group parameters per submodule so the log table stays readable.
    model_key_groups = _group_keys_by_module(matched_model_keys, original_keys)
    table = []
    memo = set()
    for key_model in matched_model_keys:
        if key_model in memo:
            continue
        if key_model in model_key_groups:
            group = model_key_groups[key_model]
            memo |= set(group)
            shapes = [tuple(model_state_dict[k].shape) for k in group]
            table.append(
                (
                    _longest_common_prefix([k[len(common_prefix) :] for k in group]) + "*",
                    _group_str([original_keys[k] for k in group]),
                    " ".join([str(x).replace(" ", "") for x in shapes]),
                )
            )
        else:
            key_checkpoint = original_keys[key_model]
            shape = str(tuple(model_state_dict[key_model].shape))
            table.append((key_model[len(common_prefix) :], key_checkpoint, shape))
    table_str = tabulate(
        table, tablefmt="pipe", headers=["Names in Model", "Names in Checkpoint", "Shapes"]
    )
    if len(table) != 1 and not use_vit:  # do this for now; the table function has some bugs when the whole CLIP is loaded
        logger.info(
            "Following weights matched with "
            + (f"submodule {common_prefix[:-1]}" if common_prefix else "model")
            + ":\n"
            + table_str
        )
    else:
        logger.info(mmk_list)

    # Unmatched checkpoint entries are passed through unchanged so later
    # loading stages can still see them.
    unmatched_ckpt_keys = [k for k in ckpt_keys if k not in set(matched_keys.keys())]
    for k in unmatched_ckpt_keys:
        result_state_dict[k] = ckpt_state_dict[k]
    return result_state_dict
|
343 |
-
|
344 |
-
|
345 |
-
def _group_keys_by_module(keys: List[str], original_names: Dict[str, str]):
    """
    Params in the same submodule are grouped together.

    Args:
        keys: names of all parameters
        original_names: mapping from parameter name to their name in the checkpoint

    Returns:
        dict[name -> all other names in the same group]
    """

    def _parent_prefix(name):
        # "a.b.weight" -> "a.b." ; names without a dot have no parent.
        dot = name.rfind(".")
        return name[: dot + 1] if dot >= 0 else None

    # Shortest prefixes first, so the coarsest grouping wins for each key.
    prefixes = sorted((p for p in map(_parent_prefix, keys) if p), key=len)

    grouped = {}
    for prefix in prefixes:
        members = [k for k in keys if k.startswith(prefix)]
        if len(members) <= 1:
            continue
        # Only group when the checkpoint-side names also share a prefix.
        if len(_longest_common_prefix_str([original_names[k] for k in members])) == 0:
            continue
        for member in members:
            grouped.setdefault(member, members)
    return grouped
|
383 |
-
|
384 |
-
|
385 |
-
def _longest_common_prefix(names: List[str]) -> str:
|
386 |
-
"""
|
387 |
-
["abc.zfg", "abc.zef"] -> "abc."
|
388 |
-
"""
|
389 |
-
names = [n.split(".") for n in names]
|
390 |
-
m1, m2 = min(names), max(names)
|
391 |
-
ret = [a for a, b in zip(m1, m2) if a == b]
|
392 |
-
ret = ".".join(ret) + "." if len(ret) else ""
|
393 |
-
return ret
|
394 |
-
|
395 |
-
|
396 |
-
def _longest_common_prefix_str(names: List[str]) -> str:
|
397 |
-
m1, m2 = min(names), max(names)
|
398 |
-
lcp = [a for a, b in zip(m1, m2) if a == b]
|
399 |
-
lcp = "".join(lcp)
|
400 |
-
return lcp
|
401 |
-
|
402 |
-
|
403 |
-
def _group_str(names: List[str]) -> str:
    """
    Turn "common1", "common2", "common3" into "common{1,2,3}"
    """
    prefix = _longest_common_prefix_str(names)
    suffixes = ",".join(name[len(prefix) :] for name in names)
    summary = prefix + "{" + suffixes + "}"

    # add some simplification for BN specifically
    summary = summary.replace("bn_{beta,running_mean,running_var,gamma}", "bn_*")
    summary = summary.replace("bn_beta,bn_running_mean,bn_running_var,bn_gamma", "bn_*")
    return summary
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Caoyunkang/Segment-Any-Anomaly/GroundingDINO/groundingdino/util/vl_utils.py
DELETED
@@ -1,100 +0,0 @@
|
|
1 |
-
import os
|
2 |
-
import random
|
3 |
-
from typing import List
|
4 |
-
|
5 |
-
import torch
|
6 |
-
|
7 |
-
|
8 |
-
def create_positive_map_from_span(tokenized, token_span, max_text_len=256):
    """construct a map such that positive_map[i,j] > 0 iff box i is associated to token j

    Each row is normalized to sum to (approximately) 1.

    Input:
        - tokenized:
          - input_ids: Tensor[1, ntokens]
          - attention_mask: Tensor[1, ntokens]
        - token_span: list with length num_boxes.
          - each item: list of [start_idx, end_idx] character spans
    """

    def _char_to_token(offsets):
        # Try each candidate character offset in order and return the first
        # one that maps to a token. Fix: the original used bare ``except:``
        # (which even swallows KeyboardInterrupt); narrowed to Exception.
        for offset in offsets:
            try:
                pos = tokenized.char_to_token(offset)
            except Exception:
                return None
            if pos is not None:
                return pos
        return None

    positive_map = torch.zeros((len(token_span), max_text_len), dtype=torch.float)
    # Debug switch: mark only the first token of the first span per box.
    only_one_pos = os.environ.get("SHILONG_DEBUG_ONLY_ONE_POS", None) == "TRUE"
    for j, tok_list in enumerate(token_span):
        for beg, end in tok_list:
            # Nudge the boundaries inward when they land between tokens
            # (e.g. on whitespace), same fallback offsets as the original.
            beg_pos = _char_to_token((beg, beg + 1, beg + 2))
            end_pos = _char_to_token((end - 1, end - 2, end - 3))
            if beg_pos is None or end_pos is None:
                continue
            if only_one_pos:
                positive_map[j, beg_pos] = 1
                break
            positive_map[j, beg_pos : end_pos + 1].fill_(1)

    # Row-normalize; the epsilon guards rows that matched no tokens.
    return positive_map / (positive_map.sum(-1)[:, None] + 1e-6)
|
47 |
-
|
48 |
-
|
49 |
-
def build_captions_and_token_span(cat_list, force_lowercase):
|
50 |
-
"""
|
51 |
-
Return:
|
52 |
-
captions: str
|
53 |
-
cat2tokenspan: dict
|
54 |
-
{
|
55 |
-
'dog': [[0, 2]],
|
56 |
-
...
|
57 |
-
}
|
58 |
-
"""
|
59 |
-
|
60 |
-
cat2tokenspan = {}
|
61 |
-
captions = ""
|
62 |
-
for catname in cat_list:
|
63 |
-
class_name = catname
|
64 |
-
if force_lowercase:
|
65 |
-
class_name = class_name.lower()
|
66 |
-
if "/" in class_name:
|
67 |
-
class_name_list: List = class_name.strip().split("/")
|
68 |
-
class_name_list.append(class_name)
|
69 |
-
class_name: str = random.choice(class_name_list)
|
70 |
-
|
71 |
-
tokens_positive_i = []
|
72 |
-
subnamelist = [i.strip() for i in class_name.strip().split(" ")]
|
73 |
-
for subname in subnamelist:
|
74 |
-
if len(subname) == 0:
|
75 |
-
continue
|
76 |
-
if len(captions) > 0:
|
77 |
-
captions = captions + " "
|
78 |
-
strat_idx = len(captions)
|
79 |
-
end_idx = strat_idx + len(subname)
|
80 |
-
tokens_positive_i.append([strat_idx, end_idx])
|
81 |
-
captions = captions + subname
|
82 |
-
|
83 |
-
if len(tokens_positive_i) > 0:
|
84 |
-
captions = captions + " ."
|
85 |
-
cat2tokenspan[class_name] = tokens_positive_i
|
86 |
-
|
87 |
-
return captions, cat2tokenspan
|
88 |
-
|
89 |
-
|
90 |
-
def build_id2posspan_and_caption(category_dict: dict):
|
91 |
-
"""Build id2pos_span and caption from category_dict
|
92 |
-
|
93 |
-
Args:
|
94 |
-
category_dict (dict): category_dict
|
95 |
-
"""
|
96 |
-
cat_list = [item["name"].lower() for item in category_dict]
|
97 |
-
id2catname = {item["id"]: item["name"].lower() for item in category_dict}
|
98 |
-
caption, cat2posspan = build_captions_and_token_span(cat_list, force_lowercase=True)
|
99 |
-
id2posspan = {catid: cat2posspan[catname] for catid, catname in id2catname.items()}
|
100 |
-
return id2posspan, caption
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Chris4K/llms_compare/Hackintosh MacOS Niresh High Sierra For Intel And AMD ? MacOS.md
DELETED
@@ -1,128 +0,0 @@
|
|
1 |
-
## Hackintosh macOS Niresh High Sierra for Intel and AMD – macOS
|
2 |
-
|
3 |
-
|
4 |
-
|
5 |
-
|
6 |
-
|
7 |
-

|
8 |
-
|
9 |
-
|
10 |
-
|
11 |
-
|
12 |
-
|
13 |
-
**Hackintosh MacOS Niresh High Sierra For Intel And AMD ? MacOS ->>> [https://www.google.com/url?q=https%3A%2F%2Furlgoal.com%2F2txP22&sa=D&sntz=1&usg=AOvVaw3za\_PPTo0AOSXp\_zwTpKjt](https://www.google.com/url?q=https%3A%2F%2Furlgoal.com%2F2txP22&sa=D&sntz=1&usg=AOvVaw3za\_PPTo0AOSXp\_zwTpKjt)**
|
14 |
-
|
15 |
-
|
16 |
-
|
17 |
-
|
18 |
-
|
19 |
-
|
20 |
-
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
25 |
-
Here is a possible title and article with html formatting for the keyword "Hackintosh macOS Niresh High Sierra for Intel and AMD â macOS":
|
26 |
-
|
27 |
-
# How to Install Hackintosh macOS Niresh High Sierra on Intel and AMD PCs
|
28 |
-
|
29 |
-
|
30 |
-
|
31 |
-
Hackintosh is a term used to describe a computer that runs macOS on non-Apple hardware. It can be a great way to enjoy the features and benefits of macOS without buying a Mac. However, hackintoshing is not a straightforward process and requires some technical knowledge and skills. In this article, we will show you how to install Hackintosh macOS Niresh High Sierra on Intel and AMD PCs using a bootable USB drive.
|
32 |
-
|
33 |
-
|
34 |
-
|
35 |
-
Niresh High Sierra is a custom macOS installer that supports both Intel and AMD processors. It comes with many pre-installed drivers and kexts that make the installation easier and faster. Niresh High Sierra also supports legacy BIOS and UEFI boot modes, which means you can use it on older or newer PCs.
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
Before you start, you will need the following:
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
- A PC with an Intel or AMD processor that supports SSE4.1 instruction set
|
44 |
-
|
45 |
-
- A 16GB or larger USB drive
|
46 |
-
|
47 |
-
- A Windows PC or a Mac to create the bootable USB drive
|
48 |
-
|
49 |
-
- A copy of Niresh High Sierra ISO file (you can download it from [here](https://www.hackintoshzone.com/files/file/1094-niresh-high-sierra/))
|
50 |
-
|
51 |
-
- A copy of TransMac software (you can download it from [here](https://www.acutesystems.com/scrtm.htm))
|
52 |
-
|
53 |
-
- A backup of your important data (hackintoshing may erase your hard drive)
|
54 |
-
|
55 |
-
|
56 |
-
|
57 |
-
Now, follow these steps to create the bootable USB drive:
|
58 |
-
|
59 |
-
|
60 |
-
|
61 |
-
1. Insert the USB drive into your Windows PC or Mac
|
62 |
-
|
63 |
-
2. Open TransMac software and run it as administrator (on Windows) or enter your password (on Mac)
|
64 |
-
|
65 |
-
3. Right-click on the USB drive in the left pane and select Format Disk for Mac
|
66 |
-
|
67 |
-
4. Enter a name for the USB drive (e.g. Niresh) and click OK
|
68 |
-
|
69 |
-
5. Right-click on the USB drive again and select Restore with Disk Image
|
70 |
-
|
71 |
-
6. Browse to the Niresh High Sierra ISO file and click OK
|
72 |
-
|
73 |
-
7. Wait for the process to complete (it may take some time)
|
74 |
-
|
75 |
-
8. Eject the USB drive safely and insert it into your PC that you want to hackintosh
|
76 |
-
|
77 |
-
|
78 |
-
|
79 |
-
Next, follow these steps to install Hackintosh macOS Niresh High Sierra on your PC:
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
1. Turn on your PC and enter the BIOS or UEFI settings (usually by pressing F2, F10, F12, Del or Esc keys)
|
84 |
-
|
85 |
-
2. Change the boot order to prioritize the USB drive as the first boot device
|
86 |
-
|
87 |
-
3. Save and exit the BIOS or UEFI settings
|
88 |
-
|
89 |
-
4. Your PC should boot from the USB drive and load the Niresh High Sierra installer
|
90 |
-
|
91 |
-
5. Select Boot macOS Install from Niresh High Sierra at the Clover bootloader menu
|
92 |
-
|
93 |
-
6. Wait for the installer to load (it may take some time)
|
94 |
-
|
95 |
-
7. Select your language and click Continue
|
96 |
-
|
97 |
-
8. At the top menu bar, click Utilities and select Disk Utility
|
98 |
-
|
99 |
-
9. Select your hard drive in the left pane and click Erase
|
100 |
-
|
101 |
-
10. Enter a name for your hard drive (e.g. Macintosh HD) and choose Mac OS Extended (Journaled) as the format
|
102 |
-
|
103 |
-
11. Click Erase then Done
|
104 |
-
|
105 |
-
12. Close Disk Utility and click Continue at the installer screen
|
106 |
-
|
107 |
-
13. Agree to the terms and conditions and select your hard drive as the destination for installation
|
108 |
-
|
109 |
-
14. Click Customize and check or uncheck any options according to your preference (you can leave them as default if you are not sure)
|
110 |
-
|
111 |
-
15. Click Install and wait for the installation to complete (it may take some time)
|
112 |
-
|
113 |
-
16. Your PC should reboot automatically after the installation is done
|
114 |
-
|
115 |
-
17. Select Boot macOS from Macintosh HD at the Clover bootloader menu
|
116 |
-
|
117 |
-
18. Follow the on-screen instructions to set up your hackintosh (e.g. choose your country, keyboard layout, Apple ID, etc.)
|
118 |
-
|
119 |
-
dfd1c89656
|
120 |
-
|
121 |
-
|
122 |
-
|
123 |
-
|
124 |
-
|
125 |
-
|
126 |
-
|
127 |
-
|
128 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Cletrason/Cletrason-toad-mario-movie/app.py
DELETED
@@ -1,3 +0,0 @@
|
|
1 |
-
import gradio as gr
|
2 |
-
|
3 |
-
gr.Interface.load("models/Cletrason/toad-mario-movie").launch()
|
|
|
|
|
|
|
|
spaces/Cong723/gpt-academic-public/toolbox.py
DELETED
@@ -1,717 +0,0 @@
|
|
1 |
-
import markdown
|
2 |
-
import importlib
|
3 |
-
import traceback
|
4 |
-
import inspect
|
5 |
-
import re
|
6 |
-
import os
|
7 |
-
from latex2mathml.converter import convert as tex2mathml
|
8 |
-
from functools import wraps, lru_cache
|
9 |
-
|
10 |
-
"""
|
11 |
-
========================================================================
|
12 |
-
第一部分
|
13 |
-
函数插件输入输出接驳区
|
14 |
-
- ChatBotWithCookies: 带Cookies的Chatbot类,为实现更多强大的功能做基础
|
15 |
-
- ArgsGeneralWrapper: 装饰器函数,用于重组输入参数,改变输入参数的顺序与结构
|
16 |
-
- update_ui: 刷新界面用 yield from update_ui(chatbot, history)
|
17 |
-
- CatchException: 将插件中出的所有问题显示在界面上
|
18 |
-
- HotReload: 实现插件的热更新
|
19 |
-
- trimmed_format_exc: 打印traceback,为了安全而隐藏绝对地址
|
20 |
-
========================================================================
|
21 |
-
"""
|
22 |
-
|
23 |
-
class ChatBotWithCookies(list):
|
24 |
-
def __init__(self, cookie):
|
25 |
-
self._cookies = cookie
|
26 |
-
|
27 |
-
def write_list(self, list):
|
28 |
-
for t in list:
|
29 |
-
self.append(t)
|
30 |
-
|
31 |
-
def get_list(self):
|
32 |
-
return [t for t in self]
|
33 |
-
|
34 |
-
def get_cookies(self):
|
35 |
-
return self._cookies
|
36 |
-
|
37 |
-
|
38 |
-
def ArgsGeneralWrapper(f):
|
39 |
-
"""
|
40 |
-
装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
|
41 |
-
"""
|
42 |
-
def decorated(cookies, max_length, llm_model, txt, txt2, top_p, temperature, chatbot, history, system_prompt, plugin_advanced_arg, *args):
|
43 |
-
txt_passon = txt
|
44 |
-
if txt == "" and txt2 != "": txt_passon = txt2
|
45 |
-
# 引入一个有cookie的chatbot
|
46 |
-
cookies.update({
|
47 |
-
'top_p':top_p,
|
48 |
-
'temperature':temperature,
|
49 |
-
})
|
50 |
-
llm_kwargs = {
|
51 |
-
'api_key': cookies['api_key'],
|
52 |
-
'llm_model': llm_model,
|
53 |
-
'top_p':top_p,
|
54 |
-
'max_length': max_length,
|
55 |
-
'temperature':temperature,
|
56 |
-
}
|
57 |
-
plugin_kwargs = {
|
58 |
-
"advanced_arg": plugin_advanced_arg,
|
59 |
-
}
|
60 |
-
chatbot_with_cookie = ChatBotWithCookies(cookies)
|
61 |
-
chatbot_with_cookie.write_list(chatbot)
|
62 |
-
yield from f(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, *args)
|
63 |
-
return decorated
|
64 |
-
|
65 |
-
|
66 |
-
def update_ui(chatbot, history, msg='正常', **kwargs): # 刷新界面
|
67 |
-
"""
|
68 |
-
刷新用户界面
|
69 |
-
"""
|
70 |
-
assert isinstance(chatbot, ChatBotWithCookies), "在传递chatbot的过程中不要将其丢弃。必要时,可用clear将其清空,然后用for+append循环重新赋值。"
|
71 |
-
yield chatbot.get_cookies(), chatbot, history, msg
|
72 |
-
|
73 |
-
def trimmed_format_exc():
|
74 |
-
import os, traceback
|
75 |
-
str = traceback.format_exc()
|
76 |
-
current_path = os.getcwd()
|
77 |
-
replace_path = "."
|
78 |
-
return str.replace(current_path, replace_path)
|
79 |
-
|
80 |
-
def CatchException(f):
|
81 |
-
"""
|
82 |
-
装饰器函数,捕捉函数f中的异常并封装到一个生成器中返回,并显示到聊天当中。
|
83 |
-
"""
|
84 |
-
|
85 |
-
@wraps(f)
|
86 |
-
def decorated(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
|
87 |
-
try:
|
88 |
-
yield from f(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT)
|
89 |
-
except Exception as e:
|
90 |
-
from check_proxy import check_proxy
|
91 |
-
from toolbox import get_conf
|
92 |
-
proxies, = get_conf('proxies')
|
93 |
-
tb_str = '```\n' + trimmed_format_exc() + '```'
|
94 |
-
if len(chatbot) == 0:
|
95 |
-
chatbot.clear()
|
96 |
-
chatbot.append(["插件调度异常", "异常原因"])
|
97 |
-
chatbot[-1] = (chatbot[-1][0],
|
98 |
-
f"[Local Message] 实验性函数调用出错: \n\n{tb_str} \n\n当前代理可用性: \n\n{check_proxy(proxies)}")
|
99 |
-
yield from update_ui(chatbot=chatbot, history=history, msg=f'异常 {e}') # 刷新界面
|
100 |
-
return decorated
|
101 |
-
|
102 |
-
|
103 |
-
def HotReload(f):
|
104 |
-
"""
|
105 |
-
HotReload的装饰器函数,用于实现Python函数插件的热更新。
|
106 |
-
函数热更新是指在不停止程序运行的情况下,更新函数代码,从而达到实时更新功能。
|
107 |
-
在装饰器内部,使用wraps(f)来保留函数的元信息,并定义了一个名为decorated的内部函数。
|
108 |
-
内部函数通过使用importlib模块的reload函数和inspect模块的getmodule函数来重新加载并获取函数模块,
|
109 |
-
然后通过getattr函数获取函数名,并在新模块中重新加载函数。
|
110 |
-
最后,使用yield from语句返回重新加载过的函数,并在被装饰的函数上执行。
|
111 |
-
最终,装饰器函数返回内部函数。这个内部函数可以将函数的原始定义更新为最新版本,并执行函数的新版本。
|
112 |
-
"""
|
113 |
-
@wraps(f)
|
114 |
-
def decorated(*args, **kwargs):
|
115 |
-
fn_name = f.__name__
|
116 |
-
f_hot_reload = getattr(importlib.reload(inspect.getmodule(f)), fn_name)
|
117 |
-
yield from f_hot_reload(*args, **kwargs)
|
118 |
-
return decorated
|
119 |
-
|
120 |
-
|
121 |
-
"""
|
122 |
-
========================================================================
|
123 |
-
第二部分
|
124 |
-
其他小工具:
|
125 |
-
- write_results_to_file: 将结果写入markdown文件中
|
126 |
-
- regular_txt_to_markdown: 将普通文本转换为Markdown格式的文本。
|
127 |
-
- report_execption: 向chatbot中添加简单的意外错误信息
|
128 |
-
- text_divide_paragraph: 将文本按照段落分隔符分割开,生成带有段落标签的HTML代码。
|
129 |
-
- markdown_convertion: 用多种方式组合,将markdown转化为好看的html
|
130 |
-
- format_io: 接管gradio默认的markdown处理方式
|
131 |
-
- on_file_uploaded: 处理文件的上传(自动解压)
|
132 |
-
- on_report_generated: 将生成的报告自动投射到文件上传区
|
133 |
-
- clip_history: 当历史上下文过长时,自动截断
|
134 |
-
- get_conf: 获取设置
|
135 |
-
- select_api_key: 根据当前的模型类别,抽取可用的api-key
|
136 |
-
========================================================================
|
137 |
-
"""
|
138 |
-
|
139 |
-
def get_reduce_token_percent(text):
|
140 |
-
"""
|
141 |
-
* 此函数未来将被弃用
|
142 |
-
"""
|
143 |
-
try:
|
144 |
-
# text = "maximum context length is 4097 tokens. However, your messages resulted in 4870 tokens"
|
145 |
-
pattern = r"(\d+)\s+tokens\b"
|
146 |
-
match = re.findall(pattern, text)
|
147 |
-
EXCEED_ALLO = 500 # 稍微留一点余地,否则在回复时会因余量太少出问题
|
148 |
-
max_limit = float(match[0]) - EXCEED_ALLO
|
149 |
-
current_tokens = float(match[1])
|
150 |
-
ratio = max_limit/current_tokens
|
151 |
-
assert ratio > 0 and ratio < 1
|
152 |
-
return ratio, str(int(current_tokens-max_limit))
|
153 |
-
except:
|
154 |
-
return 0.5, '不详'
|
155 |
-
|
156 |
-
|
157 |
-
def write_results_to_file(history, file_name=None):
|
158 |
-
"""
|
159 |
-
将对话记录history以Markdown格式写入文件中。如果没有指定文件名,则使用当前时间生成文件名。
|
160 |
-
"""
|
161 |
-
import os
|
162 |
-
import time
|
163 |
-
if file_name is None:
|
164 |
-
# file_name = time.strftime("chatGPT分析报告%Y-%m-%d-%H-%M-%S", time.localtime()) + '.md'
|
165 |
-
file_name = 'chatGPT分析报告' + \
|
166 |
-
time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime()) + '.md'
|
167 |
-
os.makedirs('./gpt_log/', exist_ok=True)
|
168 |
-
with open(f'./gpt_log/{file_name}', 'w', encoding='utf8') as f:
|
169 |
-
f.write('# chatGPT 分析报告\n')
|
170 |
-
for i, content in enumerate(history):
|
171 |
-
try: # 这个bug没找到触发条件,暂时先这样顶一下
|
172 |
-
if type(content) != str:
|
173 |
-
content = str(content)
|
174 |
-
except:
|
175 |
-
continue
|
176 |
-
if i % 2 == 0:
|
177 |
-
f.write('## ')
|
178 |
-
f.write(content)
|
179 |
-
f.write('\n\n')
|
180 |
-
res = '以上材料已经被写入' + os.path.abspath(f'./gpt_log/{file_name}')
|
181 |
-
print(res)
|
182 |
-
return res
|
183 |
-
|
184 |
-
|
185 |
-
def regular_txt_to_markdown(text):
|
186 |
-
"""
|
187 |
-
将普通文本转换为Markdown格式的文本。
|
188 |
-
"""
|
189 |
-
text = text.replace('\n', '\n\n')
|
190 |
-
text = text.replace('\n\n\n', '\n\n')
|
191 |
-
text = text.replace('\n\n\n', '\n\n')
|
192 |
-
return text
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
def report_execption(chatbot, history, a, b):
|
198 |
-
"""
|
199 |
-
向chatbot中添加错误信息
|
200 |
-
"""
|
201 |
-
chatbot.append((a, b))
|
202 |
-
history.append(a)
|
203 |
-
history.append(b)
|
204 |
-
|
205 |
-
|
206 |
-
def text_divide_paragraph(text):
|
207 |
-
"""
|
208 |
-
将文本按照段落分隔符分割开,生成带有段落标签的HTML代码。
|
209 |
-
"""
|
210 |
-
if '```' in text:
|
211 |
-
# careful input
|
212 |
-
return text
|
213 |
-
else:
|
214 |
-
# wtf input
|
215 |
-
lines = text.split("\n")
|
216 |
-
for i, line in enumerate(lines):
|
217 |
-
lines[i] = lines[i].replace(" ", " ")
|
218 |
-
text = "</br>".join(lines)
|
219 |
-
return text
|
220 |
-
|
221 |
-
@lru_cache(maxsize=128) # 使用 lru缓存 加快转换速度
|
222 |
-
def markdown_convertion(txt):
|
223 |
-
"""
|
224 |
-
将Markdown格式的文本转换为HTML格式。如果包含数学公式,则先将公式转换为HTML格式。
|
225 |
-
"""
|
226 |
-
pre = '<div class="markdown-body">'
|
227 |
-
suf = '</div>'
|
228 |
-
if txt.startswith(pre) and txt.endswith(suf):
|
229 |
-
# print('警告,输入了已经经过转化的字符串,二次转化可能出问题')
|
230 |
-
return txt # 已经被转化过,不需要再次转化
|
231 |
-
|
232 |
-
markdown_extension_configs = {
|
233 |
-
'mdx_math': {
|
234 |
-
'enable_dollar_delimiter': True,
|
235 |
-
'use_gitlab_delimiters': False,
|
236 |
-
},
|
237 |
-
}
|
238 |
-
find_equation_pattern = r'<script type="math/tex(?:.*?)>(.*?)</script>'
|
239 |
-
|
240 |
-
def tex2mathml_catch_exception(content, *args, **kwargs):
|
241 |
-
try:
|
242 |
-
content = tex2mathml(content, *args, **kwargs)
|
243 |
-
except:
|
244 |
-
content = content
|
245 |
-
return content
|
246 |
-
|
247 |
-
def replace_math_no_render(match):
|
248 |
-
content = match.group(1)
|
249 |
-
if 'mode=display' in match.group(0):
|
250 |
-
content = content.replace('\n', '</br>')
|
251 |
-
return f"<font color=\"#00FF00\">$$</font><font color=\"#FF00FF\">{content}</font><font color=\"#00FF00\">$$</font>"
|
252 |
-
else:
|
253 |
-
return f"<font color=\"#00FF00\">$</font><font color=\"#FF00FF\">{content}</font><font color=\"#00FF00\">$</font>"
|
254 |
-
|
255 |
-
def replace_math_render(match):
|
256 |
-
content = match.group(1)
|
257 |
-
if 'mode=display' in match.group(0):
|
258 |
-
if '\\begin{aligned}' in content:
|
259 |
-
content = content.replace('\\begin{aligned}', '\\begin{array}')
|
260 |
-
content = content.replace('\\end{aligned}', '\\end{array}')
|
261 |
-
content = content.replace('&', ' ')
|
262 |
-
content = tex2mathml_catch_exception(content, display="block")
|
263 |
-
return content
|
264 |
-
else:
|
265 |
-
return tex2mathml_catch_exception(content)
|
266 |
-
|
267 |
-
def markdown_bug_hunt(content):
|
268 |
-
"""
|
269 |
-
解决一个mdx_math的bug(单$包裹begin命令时多余<script>)
|
270 |
-
"""
|
271 |
-
content = content.replace('<script type="math/tex">\n<script type="math/tex; mode=display">', '<script type="math/tex; mode=display">')
|
272 |
-
content = content.replace('</script>\n</script>', '</script>')
|
273 |
-
return content
|
274 |
-
|
275 |
-
def no_code(txt):
|
276 |
-
if '```' not in txt:
|
277 |
-
return True
|
278 |
-
else:
|
279 |
-
if '```reference' in txt: return True # newbing
|
280 |
-
else: return False
|
281 |
-
|
282 |
-
if ('$' in txt) and no_code(txt): # 有$标识的公式符号,且没有代码段```的标识
|
283 |
-
# convert everything to html format
|
284 |
-
split = markdown.markdown(text='---')
|
285 |
-
convert_stage_1 = markdown.markdown(text=txt, extensions=['mdx_math', 'fenced_code', 'tables', 'sane_lists'], extension_configs=markdown_extension_configs)
|
286 |
-
convert_stage_1 = markdown_bug_hunt(convert_stage_1)
|
287 |
-
# re.DOTALL: Make the '.' special character match any character at all, including a newline; without this flag, '.' will match anything except a newline. Corresponds to the inline flag (?s).
|
288 |
-
# 1. convert to easy-to-copy tex (do not render math)
|
289 |
-
convert_stage_2_1, n = re.subn(find_equation_pattern, replace_math_no_render, convert_stage_1, flags=re.DOTALL)
|
290 |
-
# 2. convert to rendered equation
|
291 |
-
convert_stage_2_2, n = re.subn(find_equation_pattern, replace_math_render, convert_stage_1, flags=re.DOTALL)
|
292 |
-
# cat them together
|
293 |
-
return pre + convert_stage_2_1 + f'{split}' + convert_stage_2_2 + suf
|
294 |
-
else:
|
295 |
-
return pre + markdown.markdown(txt, extensions=['fenced_code', 'codehilite', 'tables', 'sane_lists']) + suf
|
296 |
-
|
297 |
-
|
298 |
-
def close_up_code_segment_during_stream(gpt_reply):
|
299 |
-
"""
|
300 |
-
在gpt输出代码的中途(输出了前面的```,但还没输出完后面的```),补上后面的```
|
301 |
-
|
302 |
-
Args:
|
303 |
-
gpt_reply (str): GPT模型返回的回复字符串。
|
304 |
-
|
305 |
-
Returns:
|
306 |
-
str: 返回一个新的字符串,将输出代码片段的“后面的```”补上。
|
307 |
-
|
308 |
-
"""
|
309 |
-
if '```' not in gpt_reply:
|
310 |
-
return gpt_reply
|
311 |
-
if gpt_reply.endswith('```'):
|
312 |
-
return gpt_reply
|
313 |
-
|
314 |
-
# 排除了以上两个情况,我们
|
315 |
-
segments = gpt_reply.split('```')
|
316 |
-
n_mark = len(segments) - 1
|
317 |
-
if n_mark % 2 == 1:
|
318 |
-
# print('输出代码片段中!')
|
319 |
-
return gpt_reply+'\n```'
|
320 |
-
else:
|
321 |
-
return gpt_reply
|
322 |
-
|
323 |
-
|
324 |
-
def format_io(self, y):
|
325 |
-
"""
|
326 |
-
将输入和输出解析为HTML格式。将y中最后一项的输入部分段落化,并将输出部分的Markdown和数学公式转换为HTML格式。
|
327 |
-
"""
|
328 |
-
if y is None or y == []:
|
329 |
-
return []
|
330 |
-
i_ask, gpt_reply = y[-1]
|
331 |
-
i_ask = text_divide_paragraph(i_ask) # 输入部分太自由,预处理一波
|
332 |
-
gpt_reply = close_up_code_segment_during_stream(gpt_reply) # 当代码输出半截的时候,试着补上后个```
|
333 |
-
y[-1] = (
|
334 |
-
None if i_ask is None else markdown.markdown(i_ask, extensions=['fenced_code', 'tables']),
|
335 |
-
None if gpt_reply is None else markdown_convertion(gpt_reply)
|
336 |
-
)
|
337 |
-
return y
|
338 |
-
|
339 |
-
|
340 |
-
def find_free_port():
|
341 |
-
"""
|
342 |
-
返回当前系统中可用的未使用端口。
|
343 |
-
"""
|
344 |
-
import socket
|
345 |
-
from contextlib import closing
|
346 |
-
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
|
347 |
-
s.bind(('', 0))
|
348 |
-
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
349 |
-
return s.getsockname()[1]
|
350 |
-
|
351 |
-
|
352 |
-
def extract_archive(file_path, dest_dir):
|
353 |
-
import zipfile
|
354 |
-
import tarfile
|
355 |
-
import os
|
356 |
-
# Get the file extension of the input file
|
357 |
-
file_extension = os.path.splitext(file_path)[1]
|
358 |
-
|
359 |
-
# Extract the archive based on its extension
|
360 |
-
if file_extension == '.zip':
|
361 |
-
with zipfile.ZipFile(file_path, 'r') as zipobj:
|
362 |
-
zipobj.extractall(path=dest_dir)
|
363 |
-
print("Successfully extracted zip archive to {}".format(dest_dir))
|
364 |
-
|
365 |
-
elif file_extension in ['.tar', '.gz', '.bz2']:
|
366 |
-
with tarfile.open(file_path, 'r:*') as tarobj:
|
367 |
-
tarobj.extractall(path=dest_dir)
|
368 |
-
print("Successfully extracted tar archive to {}".format(dest_dir))
|
369 |
-
|
370 |
-
# 第三方库,需要预先pip install rarfile
|
371 |
-
# 此外,Windows上还需要安装winrar软件,配置其Path环境变量,如"C:\Program Files\WinRAR"才可以
|
372 |
-
elif file_extension == '.rar':
|
373 |
-
try:
|
374 |
-
import rarfile
|
375 |
-
with rarfile.RarFile(file_path) as rf:
|
376 |
-
rf.extractall(path=dest_dir)
|
377 |
-
print("Successfully extracted rar archive to {}".format(dest_dir))
|
378 |
-
except:
|
379 |
-
print("Rar format requires additional dependencies to install")
|
380 |
-
return '\n\n需要安装pip install rarfile来解压rar文件'
|
381 |
-
|
382 |
-
# 第三方库,需要预先pip install py7zr
|
383 |
-
elif file_extension == '.7z':
|
384 |
-
try:
|
385 |
-
import py7zr
|
386 |
-
with py7zr.SevenZipFile(file_path, mode='r') as f:
|
387 |
-
f.extractall(path=dest_dir)
|
388 |
-
print("Successfully extracted 7z archive to {}".format(dest_dir))
|
389 |
-
except:
|
390 |
-
print("7z format requires additional dependencies to install")
|
391 |
-
return '\n\n需要安装pip install py7zr来解压7z文件'
|
392 |
-
else:
|
393 |
-
return ''
|
394 |
-
return ''
|
395 |
-
|
396 |
-
|
397 |
-
def find_recent_files(directory):
|
398 |
-
"""
|
399 |
-
me: find files that is created with in one minutes under a directory with python, write a function
|
400 |
-
gpt: here it is!
|
401 |
-
"""
|
402 |
-
import os
|
403 |
-
import time
|
404 |
-
current_time = time.time()
|
405 |
-
one_minute_ago = current_time - 60
|
406 |
-
recent_files = []
|
407 |
-
|
408 |
-
for filename in os.listdir(directory):
|
409 |
-
file_path = os.path.join(directory, filename)
|
410 |
-
if file_path.endswith('.log'):
|
411 |
-
continue
|
412 |
-
created_time = os.path.getmtime(file_path)
|
413 |
-
if created_time >= one_minute_ago:
|
414 |
-
if os.path.isdir(file_path):
|
415 |
-
continue
|
416 |
-
recent_files.append(file_path)
|
417 |
-
|
418 |
-
return recent_files
|
419 |
-
|
420 |
-
|
421 |
-
def on_file_uploaded(files, chatbot, txt, txt2, checkboxes):
|
422 |
-
"""
|
423 |
-
当文件被上传时的回调函数
|
424 |
-
"""
|
425 |
-
if len(files) == 0:
|
426 |
-
return chatbot, txt
|
427 |
-
import shutil
|
428 |
-
import os
|
429 |
-
import time
|
430 |
-
import glob
|
431 |
-
from toolbox import extract_archive
|
432 |
-
try:
|
433 |
-
shutil.rmtree('./private_upload/')
|
434 |
-
except:
|
435 |
-
pass
|
436 |
-
time_tag = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime())
|
437 |
-
os.makedirs(f'private_upload/{time_tag}', exist_ok=True)
|
438 |
-
err_msg = ''
|
439 |
-
for file in files:
|
440 |
-
file_origin_name = os.path.basename(file.orig_name)
|
441 |
-
shutil.copy(file.name, f'private_upload/{time_tag}/{file_origin_name}')
|
442 |
-
err_msg += extract_archive(f'private_upload/{time_tag}/{file_origin_name}',
|
443 |
-
dest_dir=f'private_upload/{time_tag}/{file_origin_name}.extract')
|
444 |
-
moved_files = [fp for fp in glob.glob('private_upload/**/*', recursive=True)]
|
445 |
-
if "底部输入区" in checkboxes:
|
446 |
-
txt = ""
|
447 |
-
txt2 = f'private_upload/{time_tag}'
|
448 |
-
else:
|
449 |
-
txt = f'private_upload/{time_tag}'
|
450 |
-
txt2 = ""
|
451 |
-
moved_files_str = '\t\n\n'.join(moved_files)
|
452 |
-
chatbot.append(['我上传了文件,请查收',
|
453 |
-
f'[Local Message] 收到以下文件: \n\n{moved_files_str}' +
|
454 |
-
f'\n\n调用路径参数已自动修正到: \n\n{txt}' +
|
455 |
-
f'\n\n现在您点击任意“红颜色”标识的函数插件时,以上文件将被作为输入参数'+err_msg])
|
456 |
-
return chatbot, txt, txt2
|
457 |
-
|
458 |
-
|
459 |
-
def on_report_generated(files, chatbot):
|
460 |
-
from toolbox import find_recent_files
|
461 |
-
report_files = find_recent_files('gpt_log')
|
462 |
-
if len(report_files) == 0:
|
463 |
-
return None, chatbot
|
464 |
-
# files.extend(report_files)
|
465 |
-
chatbot.append(['汇总报告如何远程获取?', '汇总报告已经添加到右侧“文件上传区”(可能处于折叠状态),请查收。'])
|
466 |
-
return report_files, chatbot
|
467 |
-
|
468 |
-
def is_openai_api_key(key):
|
469 |
-
API_MATCH_ORIGINAL = re.match(r"sk-[a-zA-Z0-9]{48}$", key)
|
470 |
-
API_MATCH_AZURE = re.match(r"[a-zA-Z0-9]{32}$", key)
|
471 |
-
return bool(API_MATCH_ORIGINAL) or bool(API_MATCH_AZURE)
|
472 |
-
|
473 |
-
def is_api2d_key(key):
|
474 |
-
if key.startswith('fk') and len(key) == 41:
|
475 |
-
return True
|
476 |
-
else:
|
477 |
-
return False
|
478 |
-
|
479 |
-
def is_any_api_key(key):
|
480 |
-
if ',' in key:
|
481 |
-
keys = key.split(',')
|
482 |
-
for k in keys:
|
483 |
-
if is_any_api_key(k): return True
|
484 |
-
return False
|
485 |
-
else:
|
486 |
-
return is_openai_api_key(key) or is_api2d_key(key)
|
487 |
-
|
488 |
-
def what_keys(keys):
|
489 |
-
avail_key_list = {'OpenAI Key':0, "API2D Key":0}
|
490 |
-
key_list = keys.split(',')
|
491 |
-
|
492 |
-
for k in key_list:
|
493 |
-
if is_openai_api_key(k):
|
494 |
-
avail_key_list['OpenAI Key'] += 1
|
495 |
-
|
496 |
-
for k in key_list:
|
497 |
-
if is_api2d_key(k):
|
498 |
-
avail_key_list['API2D Key'] += 1
|
499 |
-
|
500 |
-
return f"检测到: OpenAI Key {avail_key_list['OpenAI Key']} 个,API2D Key {avail_key_list['API2D Key']} 个"
|
501 |
-
|
502 |
-
def select_api_key(keys, llm_model):
|
503 |
-
import random
|
504 |
-
avail_key_list = []
|
505 |
-
key_list = keys.split(',')
|
506 |
-
|
507 |
-
if llm_model.startswith('gpt-'):
|
508 |
-
for k in key_list:
|
509 |
-
if is_openai_api_key(k): avail_key_list.append(k)
|
510 |
-
|
511 |
-
if llm_model.startswith('api2d-'):
|
512 |
-
for k in key_list:
|
513 |
-
if is_api2d_key(k): avail_key_list.append(k)
|
514 |
-
|
515 |
-
if len(avail_key_list) == 0:
|
516 |
-
raise RuntimeError(f"您提供的api-key不满足要求,不包含任何可用于{llm_model}的api-key。您可能选择了错误的模型或请求源。")
|
517 |
-
|
518 |
-
api_key = random.choice(avail_key_list) # 随机负载均衡
|
519 |
-
return api_key
|
520 |
-
|
521 |
-
def read_env_variable(arg, default_value):
|
522 |
-
"""
|
523 |
-
环境变量可以是 `GPT_ACADEMIC_CONFIG`(优先),也可以直接是`CONFIG`
|
524 |
-
例如在windows cmd中,既可以写:
|
525 |
-
set USE_PROXY=True
|
526 |
-
set API_KEY=sk-j7caBpkRoxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
527 |
-
set proxies={"http":"http://127.0.0.1:10085", "https":"http://127.0.0.1:10085",}
|
528 |
-
set AVAIL_LLM_MODELS=["gpt-3.5-turbo", "chatglm"]
|
529 |
-
set AUTHENTICATION=[("username", "password"), ("username2", "password2")]
|
530 |
-
也可以写:
|
531 |
-
set GPT_ACADEMIC_USE_PROXY=True
|
532 |
-
set GPT_ACADEMIC_API_KEY=sk-j7caBpkRoxxxxxxxxxxxxxxxxxxxxxxxxxxxx
|
533 |
-
set GPT_ACADEMIC_proxies={"http":"http://127.0.0.1:10085", "https":"http://127.0.0.1:10085",}
|
534 |
-
set GPT_ACADEMIC_AVAIL_LLM_MODELS=["gpt-3.5-turbo", "chatglm"]
|
535 |
-
set GPT_ACADEMIC_AUTHENTICATION=[("username", "password"), ("username2", "password2")]
|
536 |
-
"""
|
537 |
-
from colorful import print亮红, print亮绿
|
538 |
-
arg_with_prefix = "GPT_ACADEMIC_" + arg
|
539 |
-
if arg_with_prefix in os.environ:
|
540 |
-
env_arg = os.environ[arg_with_prefix]
|
541 |
-
elif arg in os.environ:
|
542 |
-
env_arg = os.environ[arg]
|
543 |
-
else:
|
544 |
-
raise KeyError
|
545 |
-
print(f"[ENV_VAR] 尝试加载{arg},默认值:{default_value} --> 修正值:{env_arg}")
|
546 |
-
try:
|
547 |
-
if isinstance(default_value, bool):
|
548 |
-
r = bool(env_arg)
|
549 |
-
elif isinstance(default_value, int):
|
550 |
-
r = int(env_arg)
|
551 |
-
elif isinstance(default_value, float):
|
552 |
-
r = float(env_arg)
|
553 |
-
elif isinstance(default_value, str):
|
554 |
-
r = env_arg.strip()
|
555 |
-
elif isinstance(default_value, dict):
|
556 |
-
r = eval(env_arg)
|
557 |
-
elif isinstance(default_value, list):
|
558 |
-
r = eval(env_arg)
|
559 |
-
elif default_value is None:
|
560 |
-
assert arg == "proxies"
|
561 |
-
r = eval(env_arg)
|
562 |
-
else:
|
563 |
-
print亮红(f"[ENV_VAR] 环境变量{arg}不支持通过环境变量设置! ")
|
564 |
-
raise KeyError
|
565 |
-
except:
|
566 |
-
print亮红(f"[ENV_VAR] 环境变量{arg}加载失败! ")
|
567 |
-
raise KeyError(f"[ENV_VAR] 环境变量{arg}加载失败! ")
|
568 |
-
|
569 |
-
print亮绿(f"[ENV_VAR] 成功读取环境变量{arg}")
|
570 |
-
return r
|
571 |
-
|
572 |
-
@lru_cache(maxsize=128)
|
573 |
-
def read_single_conf_with_lru_cache(arg):
|
574 |
-
from colorful import print亮红, print亮绿, print亮蓝
|
575 |
-
try:
|
576 |
-
# 优先级1. 获取环境变量作为配置
|
577 |
-
default_ref = getattr(importlib.import_module('config'), arg) # 读取默认值作为数据类型转换的参考
|
578 |
-
r = read_env_variable(arg, default_ref)
|
579 |
-
except:
|
580 |
-
try:
|
581 |
-
# 优先级2. 获取config_private中的配置
|
582 |
-
r = getattr(importlib.import_module('config_private'), arg)
|
583 |
-
except:
|
584 |
-
# 优先级3. 获取config中的配置
|
585 |
-
r = getattr(importlib.import_module('config'), arg)
|
586 |
-
|
587 |
-
# 在读取API_KEY时,检查一下是不是忘了改config
|
588 |
-
if arg == 'API_KEY':
|
589 |
-
print亮蓝(f"[API_KEY] 本项目现已支持OpenAI和API2D的api-key。也支持同时填写多个api-key,如API_KEY=\"openai-key1,openai-key2,api2d-key3\"")
|
590 |
-
print亮蓝(f"[API_KEY] 您既可以在config.py中修改api-key(s),也可以在问题输入区输入临时的api-key(s),然后回车键提交后即可生效。")
|
591 |
-
if is_any_api_key(r):
|
592 |
-
print亮绿(f"[API_KEY] 您的 API_KEY 是: {r[:15]}*** API_KEY 导入成功")
|
593 |
-
else:
|
594 |
-
print亮红( "[API_KEY] 正确的 API_KEY 是'sk'开头的51位密钥(OpenAI),或者 'fk'开头的41位密钥,请在config文件中修改API密钥之后再运行。")
|
595 |
-
if arg == 'proxies':
|
596 |
-
if r is None:
|
597 |
-
print亮红('[PROXY] 网络代理状态:未配置。无代理状态下很可能无法访问OpenAI家族的模型。建议:检查USE_PROXY选项是否修改。')
|
598 |
-
else:
|
599 |
-
print亮绿('[PROXY] 网络代理状态:已配置。配置信息如下:', r)
|
600 |
-
assert isinstance(r, dict), 'proxies格式错误,请注意proxies选项的格式,不要遗漏括号。'
|
601 |
-
return r
|
602 |
-
|
603 |
-
|
604 |
-
def get_conf(*args):
|
605 |
-
# 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到
|
606 |
-
res = []
|
607 |
-
for arg in args:
|
608 |
-
r = read_single_conf_with_lru_cache(arg)
|
609 |
-
res.append(r)
|
610 |
-
return res
|
611 |
-
|
612 |
-
|
613 |
-
def clear_line_break(txt):
|
614 |
-
txt = txt.replace('\n', ' ')
|
615 |
-
txt = txt.replace(' ', ' ')
|
616 |
-
txt = txt.replace(' ', ' ')
|
617 |
-
return txt
|
618 |
-
|
619 |
-
|
620 |
-
class DummyWith():
    """A no-op context manager.

    Drop-in stand-in for a real context manager, so that surrounding code can
    keep its ``with`` structure unchanged when no setup/teardown is wanted.
    Context managers implement ``__enter__`` (called before the managed block
    runs) and ``__exit__`` (called when it finishes); both are deliberately
    inert here.
    """
    def __enter__(self):
        # Hand the (stateless) manager itself to any `as` target.
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Returning a falsy value lets any in-flight exception propagate.
        return
|
635 |
-
|
636 |
-
def run_gradio_in_subpath(demo, auth, port, custom_path):
    """
    Serve a gradio demo under a custom sub-path by mounting it on a FastAPI app.
    """
    def check_sub_url(path: str)->bool:
        '''
        check path for sub url
        path: path to check
        return value: do sub url wrap
        '''
        # Root path needs no wrapping but is always acceptable.
        if path == "/": return True
        if len(path) == 0:
            print("ilegal custom path: {}\npath must not be empty\ndeploy on root url".format(path))
            return False
        if path[0] != '/':
            print("ilegal custom path: {}\npath should begin with \'/\'\ndeploy on root url".format(path))
            return False
        # Starts with a single '/': a usable sub-path. A double leading
        # slash ('//...') is rejected without a message, as before.
        if path[1] != '/':
            print("deploy on sub-path {}".format(path))
            return True
        return False

    if not check_sub_url(custom_path): raise RuntimeError('Ilegal custom path')
    # Imported lazily so the module can be used without the web stack installed.
    import uvicorn
    import gradio as gr
    from fastapi import FastAPI
    app = FastAPI()
    if custom_path != "/":
        # Friendly pointer for anyone hitting the bare root of the server.
        @app.get("/")
        def read_main():
            return {"message": f"Gradio is running at: {custom_path}"}
    app = gr.mount_gradio_app(app, demo, path=custom_path)
    # NOTE(review): `auth` is accepted but not forwarded; uvicorn.run has no
    # auth parameter (see the retained comment) — confirm intent with callers.
    uvicorn.run(app, host="0.0.0.0", port=port) # , auth=auth
|
669 |
-
|
670 |
-
|
671 |
-
def clip_history(inputs, history, tokenizer, max_token_limit):
    """
    Reduce the token length of `history` by clipping.

    The longest entry is repeatedly shortened, little by little, until the
    token count of (inputs + history) fits under `max_token_limit`.
    (通过裁剪来缩短历史记录的长度: 逐渐搜索最长的条目进行剪辑, 直到
    标记数量降低到阈值以下。)

    Parameters
    ----------
    inputs : str
        Current user input; its token budget is reserved first.
    history : list[str]
        Previous dialogue turns; may be dropped entirely if the budget is tight.
    tokenizer : object
        Must provide ``encode(txt, disallowed_special=())`` and ``decode(tokens)``.
    max_token_limit : int
        Overall token budget for inputs + history (+ a 128-token reply reserve).

    Returns
    -------
    list[str]
        The clipped history (possibly empty).

    Fixes vs. the previous version:
    - removed the unused ``request_llm.bridge_all`` import;
    - clipping granularity is at least 1 token (``max(...) // 16`` could be 0);
    - bail out instead of looping forever when every entry is already empty.
    """
    import numpy as np

    def get_token_num(txt):
        return len(tokenizer.encode(txt, disallowed_special=()))

    input_token_num = get_token_num(inputs)
    if input_token_num < max_token_limit * 3 / 4:
        # Inputs use less than 3/4 of the budget: reserve room for the inputs
        # themselves (1) and for a 128-token reply (2), then clip history into
        # whatever remains.
        max_token_limit = max_token_limit - input_token_num
        max_token_limit = max_token_limit - 128
        # 3. If the remaining budget is too small to be useful, drop history.
        if max_token_limit < 128:
            return []
    else:
        # Inputs alone dominate the budget: drop history entirely.
        return []

    # Leading '' keeps indices aligned with the clipping loop; the input
    # itself is never clipped.
    everything = ['']
    everything.extend(history)
    n_token = get_token_num('\n'.join(everything))
    everything_token = [get_token_num(e) for e in everything]

    # Clipping granularity: roughly 1/16 of the longest entry per round,
    # but at least 1 token so progress is always made.
    delta = max(max(everything_token) // 16, 1)

    while n_token > max_token_limit:
        where = np.argmax(everything_token)
        encoded = tokenizer.encode(everything[where], disallowed_special=())
        if len(encoded) == 0:
            # Longest entry is already empty -- nothing left to clip.
            # Returning an over-long history beats an infinite loop.
            break
        clipped_encoded = encoded[:len(encoded)-delta]
        everything[where] = tokenizer.decode(clipped_encoded)[:-1]  # -1 to remove the may-be illegal char
        everything_token[where] = get_token_num(everything[where])
        n_token = get_token_num('\n'.join(everything))

    return everything[1:]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/DQChoi/gpt-demo/venv/lib/python3.11/site-packages/altair/utils/schemapi.py
DELETED
@@ -1,1126 +0,0 @@
|
|
1 |
-
# The contents of this file are automatically written by
|
2 |
-
# tools/generate_schema_wrapper.py. Do not modify directly.
|
3 |
-
import collections
|
4 |
-
import contextlib
|
5 |
-
import inspect
|
6 |
-
import json
|
7 |
-
import textwrap
|
8 |
-
from typing import (
|
9 |
-
Any,
|
10 |
-
Sequence,
|
11 |
-
List,
|
12 |
-
Dict,
|
13 |
-
Optional,
|
14 |
-
DefaultDict,
|
15 |
-
Tuple,
|
16 |
-
Iterable,
|
17 |
-
Type,
|
18 |
-
)
|
19 |
-
from itertools import zip_longest
|
20 |
-
|
21 |
-
import jsonschema
|
22 |
-
import jsonschema.exceptions
|
23 |
-
import jsonschema.validators
|
24 |
-
import numpy as np
|
25 |
-
import pandas as pd
|
26 |
-
|
27 |
-
from altair import vegalite
|
28 |
-
|
29 |
-
ValidationErrorList = List[jsonschema.exceptions.ValidationError]
|
30 |
-
GroupedValidationErrors = Dict[str, ValidationErrorList]
|
31 |
-
|
32 |
-
|
33 |
-
# If DEBUG_MODE is True, then schema objects are converted to dict and
# validated at creation time. This slows things down, particularly for
# larger specs, but leads to much more useful tracebacks for the user.
# Individual schema classes can override this by setting the
# class-level _class_is_valid_at_instantiation attribute to False
# (toggle at runtime via enable_debug_mode()/disable_debug_mode()).
DEBUG_MODE: bool = True
|
39 |
-
|
40 |
-
|
41 |
-
def enable_debug_mode():
    """Switch eager (per-instantiation) schema validation on (see DEBUG_MODE)."""
    global DEBUG_MODE
    DEBUG_MODE = True
|
44 |
-
|
45 |
-
|
46 |
-
def disable_debug_mode():
    """Switch eager (per-instantiation) schema validation off (see DEBUG_MODE)."""
    global DEBUG_MODE
    DEBUG_MODE = False
|
49 |
-
|
50 |
-
|
51 |
-
@contextlib.contextmanager
def debug_mode(arg):
    """Temporarily force DEBUG_MODE to *arg* for the duration of a with-block.

    The previous value is restored on exit, even if the body raises.
    """
    global DEBUG_MODE
    saved = DEBUG_MODE
    DEBUG_MODE = arg
    try:
        yield
    finally:
        DEBUG_MODE = saved
|
60 |
-
|
61 |
-
|
62 |
-
def validate_jsonschema(
    spec: Dict[str, Any],
    schema: Dict[str, Any],
    rootschema: Optional[Dict[str, Any]] = None,
    raise_error: bool = True,
) -> Optional[jsonschema.exceptions.ValidationError]:
    """Validates the passed in spec against the schema in the context of the
    rootschema. If any errors are found, they are deduplicated and prioritized
    and only the most relevant errors are kept. Errors are then either raised
    or returned, depending on the value of `raise_error`.
    """
    all_errors = _get_errors_from_spec(spec, schema, rootschema=rootschema)
    if not all_errors:
        return None

    # Boil the raw error tree down to the most specific, deduplicated errors.
    leaf_errors = _get_leaves_of_error_tree(all_errors)
    grouped = _group_errors_by_json_path(leaf_errors)
    grouped = _subset_to_most_specific_json_paths(grouped)
    grouped = _deduplicate_errors(grouped)

    # Nothing special about this first error but we need to choose one
    # which can be raised.
    main_error = next(iter(grouped.values()))[0]
    # Attach the full grouped mapping so SchemaValidationError can craft a
    # more helpful message. Setting an ad-hoc attribute like this is not
    # ideal (the object no longer matches plain ValidationError); a cleaner
    # refactor would return errors instead of raising.
    main_error._all_errors = grouped  # type: ignore[attr-defined]
    if raise_error:
        raise main_error
    return main_error
|
95 |
-
|
96 |
-
|
97 |
-
def _get_errors_from_spec(
    spec: Dict[str, Any],
    schema: Dict[str, Any],
    rootschema: Optional[Dict[str, Any]] = None,
) -> ValidationErrorList:
    """Uses the relevant jsonschema validator to validate the passed in spec
    against the schema using the rootschema to resolve references.
    The schema and rootschema themselves are not validated but instead considered
    as valid.
    """
    # jsonschema.validate would validate the schema itself, which is unwanted:
    # the schema comes from Vega-Lite rather than from user input, and some of
    # its "$ref" values are not encoded (e.g. they contain '<'), so the
    # "uri-reference" format checker would reject otherwise-valid references.
    if rootschema is None:
        validator_cls = jsonschema.validators.validator_for(schema)
        # No resolver is necessary if the schema is already the full schema.
        resolver = None
    else:
        validator_cls = jsonschema.validators.validator_for(rootschema)
        resolver = jsonschema.RefResolver.from_schema(rootschema)

    validator_kwargs = {"resolver": resolver}
    if hasattr(validator_cls, "FORMAT_CHECKER"):
        validator_kwargs["format_checker"] = validator_cls.FORMAT_CHECKER
    validator = validator_cls(schema, **validator_kwargs)
    return list(validator.iter_errors(spec))
|
130 |
-
|
131 |
-
|
132 |
-
def _json_path(err: jsonschema.exceptions.ValidationError) -> str:
    """Drop in replacement for the .json_path property of the jsonschema
    ValidationError class, which is not available as property for
    ValidationError with jsonschema<4.0.1.
    More info, see https://github.com/altair-viz/altair/issues/3038
    """
    # Integer path elements are list indices ("[0]"), everything else is a
    # property access (".name"), rooted at "$".
    parts = ["$"]
    for elem in err.absolute_path:
        parts.append("[" + str(elem) + "]" if isinstance(elem, int) else "." + elem)
    return "".join(parts)
|
145 |
-
|
146 |
-
|
147 |
-
def _group_errors_by_json_path(
    errors: ValidationErrorList,
) -> GroupedValidationErrors:
    """Groups errors by the `json_path` attribute of the jsonschema ValidationError
    class. This attribute contains the path to the offending element within
    a chart specification and can therefore be considered as an identifier of an
    'issue' in the chart that needs to be fixed.
    """
    errors_by_json_path = collections.defaultdict(list)
    for err in errors:
        # A getattr with a computed default would evaluate _json_path(err)
        # eagerly for every error, even when the attribute exists
        # (jsonschema>=4.0.1); compute the fallback lazily instead.
        err_key = getattr(err, "json_path", None)
        if err_key is None:
            err_key = _json_path(err)
        errors_by_json_path[err_key].append(err)
    return dict(errors_by_json_path)
|
160 |
-
|
161 |
-
|
162 |
-
def _get_leaves_of_error_tree(
    errors: ValidationErrorList,
) -> ValidationErrorList:
    """For each error in `errors`, it traverses down the "error tree" that is generated
    by the jsonschema library to find and return all "leaf" errors. These are errors
    which have no further errors that caused it and so they are the most specific errors
    with the most specific error messages.
    """
    leaves: ValidationErrorList = []
    for err in errors:
        # A non-empty `context` means this error was caused by errors in
        # subschemas; recurse into them to reach the most specific messages.
        children = err.context
        if children:
            leaves.extend(_get_leaves_of_error_tree(children))
        else:
            leaves.append(err)
    return leaves
|
180 |
-
|
181 |
-
|
182 |
-
def _subset_to_most_specific_json_paths(
    errors_by_json_path: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """Removes key (json path), value (errors) pairs where the json path is fully
    contained in another json path. For example if `errors_by_json_path` has two
    keys, `$.encoding.X` and `$.encoding.X.tooltip`, then the first one will be removed
    and only the second one is returned. This is done under the assumption that
    more specific json paths give more helpful error messages to the user.
    """
    all_paths = list(errors_by_json_path.keys())
    # Keep only paths that are not a strict prefix of some other path.
    return {
        json_path: errors
        for json_path, errors in errors_by_json_path.items()
        if not _contained_at_start_of_one_of_other_values(json_path, all_paths)
    }
|
198 |
-
|
199 |
-
|
200 |
-
def _contained_at_start_of_one_of_other_values(x: str, values: Sequence[str]) -> bool:
|
201 |
-
# Does not count as "contained at start of other value" if the values are
|
202 |
-
# the same. These cases should be handled separately
|
203 |
-
return any(value.startswith(x) for value in values if x != value)
|
204 |
-
|
205 |
-
|
206 |
-
def _deduplicate_errors(
    grouped_errors: GroupedValidationErrors,
) -> GroupedValidationErrors:
    """Some errors have very similar error messages or are just in general not helpful
    for a user. This function removes as many of these cases as possible and
    can be extended over time to handle new cases that come up.
    """
    # Validator-specific deduplication strategies; anything not listed only
    # gets the generic message-based deduplication.
    deduplication_functions = {
        "enum": _deduplicate_enum_errors,
        "additionalProperties": _deduplicate_additional_properties_errors,
    }

    grouped_errors_deduplicated: GroupedValidationErrors = {}
    for json_path, element_errors in grouped_errors.items():
        deduplicated: ValidationErrorList = []
        for validator, errors in _group_errors_by_validator(element_errors).items():
            dedup = deduplication_functions.get(validator)
            if dedup is not None:
                errors = dedup(errors)
            deduplicated.extend(_deduplicate_by_message(errors))

        # Remove any "'value' is a required property" errors: they come from
        # validating against schema branches matching the output of
        # `alt.value`. If a user actually calls `alt.value`, the `value`
        # keyword is filled in automatically, so its absence means these
        # branches were almost certainly not what the user intended.
        grouped_errors_deduplicated[json_path] = [
            err for err in deduplicated if not _is_required_value_error(err)
        ]
    return grouped_errors_deduplicated
|
241 |
-
|
242 |
-
|
243 |
-
def _is_required_value_error(err: jsonschema.exceptions.ValidationError) -> bool:
    """True for the generic "'value' is a required property" error."""
    is_required = err.validator == "required"
    return is_required and err.validator_value == ["value"]
|
245 |
-
|
246 |
-
|
247 |
-
def _group_errors_by_validator(errors: ValidationErrorList) -> GroupedValidationErrors:
    """Groups the errors by the json schema "validator" that casued the error. For
    example if the error is that a value is not one of an enumeration in the json schema
    then the "validator" is `"enum"`, if the error is due to an unknown property that
    was set although no additional properties are allowed then "validator" is
    `"additionalProperties`, etc.
    """
    grouped = {}
    for err in errors:
        # err.validator is typed Optional[Validator] upstream but is a str in
        # all observed cases, hence the ignore.
        grouped.setdefault(err.validator, []).append(err)  # type: ignore[index]
    return grouped
|
263 |
-
|
264 |
-
|
265 |
-
def _deduplicate_enum_errors(errors: ValidationErrorList) -> ValidationErrorList:
    """Deduplicate enum errors by removing the errors where the allowed values
    are a subset of another error. For example, if `enum` contains two errors
    and one has `validator_value` (i.e. accepted values) ["A", "B"] and the
    other one ["A", "B", "C"] then the first one is removed and the final
    `enum` list only contains the error with ["A", "B", "C"].
    """
    if len(errors) <= 1:
        return errors
    # Values (and therefore `validator_value`) of an enum are always arrays,
    # see https://json-schema.org/understanding-json-schema/reference/generic.html#enumerated-values
    # which is why we can use join below.
    value_strings = [",".join(err.validator_value) for err in errors]
    return [
        err
        for value_str, err in zip(value_strings, errors)
        if not _contained_at_start_of_one_of_other_values(value_str, value_strings)
    ]
|
283 |
-
|
284 |
-
|
285 |
-
def _deduplicate_additional_properties_errors(
    errors: ValidationErrorList,
) -> ValidationErrorList:
    """If there are multiple additional property errors it usually means that
    the offending element was validated against multiple schemas and
    its parent is a common anyOf validator.
    The error messages produced from these cases are usually
    very similar and we just take the shortest one. For example,
    the following 3 errors are raised for the `unknown` channel option in
    `alt.X("variety", unknown=2)`:
    - "Additional properties are not allowed ('unknown' was unexpected)"
    - "Additional properties are not allowed ('field', 'unknown' were unexpected)"
    - "Additional properties are not allowed ('field', 'type', 'unknown' were unexpected)"
    """
    if len(errors) <= 1:
        return errors
    # Only prioritize if every error shares the same anyOf parent. No chart
    # spec where that isn't the case is known, but allow for it to be safe.
    # errors[0] defines `parent`, so only the remaining ones need checking.
    parent = errors[0].parent
    shared_any_of_parent = (
        parent is not None
        and parent.validator == "anyOf"
        and all(err.parent is parent for err in errors[1:])
    )
    if shared_any_of_parent:
        return [min(errors, key=lambda err: len(err.message))]
    return errors
|
313 |
-
|
314 |
-
|
315 |
-
def _deduplicate_by_message(errors: ValidationErrorList) -> ValidationErrorList:
    """Deduplicate errors by message. This keeps the original order in case
    it was chosen intentionally.
    """
    # dict preserves first-seen key order while a later duplicate message
    # replaces the stored error object (matching the original dict-comp).
    by_message = {}
    for err in errors:
        by_message[err.message] = err
    return list(by_message.values())
|
320 |
-
|
321 |
-
|
322 |
-
def _subclasses(cls):
|
323 |
-
"""Breadth-first sequence of all classes which inherit from cls."""
|
324 |
-
seen = set()
|
325 |
-
current_set = {cls}
|
326 |
-
while current_set:
|
327 |
-
seen |= current_set
|
328 |
-
current_set = set.union(*(set(cls.__subclasses__()) for cls in current_set))
|
329 |
-
for cls in current_set - seen:
|
330 |
-
yield cls
|
331 |
-
|
332 |
-
|
333 |
-
def _todict(obj, context):
    """Convert an object to a dict representation."""
    # NOTE: branch order matters. SchemaBase instances are handled before the
    # generic `to_dict` duck-typing check, and sequences/dicts recurse before
    # anything else is considered.
    if isinstance(obj, SchemaBase):
        return obj.to_dict(validate=False, context=context)
    if isinstance(obj, (list, tuple, np.ndarray)):
        return [_todict(child, context) for child in obj]
    if isinstance(obj, dict):
        # Undefined-valued keys are omitted from the output entirely.
        return {
            key: _todict(val, context)
            for key, val in obj.items()
            if val is not Undefined
        }
    if hasattr(obj, "to_dict"):
        return obj.to_dict()
    if isinstance(obj, np.number):
        return float(obj)
    if isinstance(obj, (pd.Timestamp, np.datetime64)):
        return pd.Timestamp(obj).isoformat()
    return obj
|
349 |
-
|
350 |
-
|
351 |
-
def _resolve_references(schema, root=None):
    """Resolve schema references."""
    # Resolve against `root` when given (falling back to `schema` itself),
    # following "$ref" chains until a concrete schema is reached.
    ref_resolver = jsonschema.RefResolver.from_schema(root or schema)
    while "$ref" in schema:
        with ref_resolver.resolving(schema["$ref"]) as resolved:
            schema = resolved
    return schema
|
358 |
-
|
359 |
-
|
360 |
-
class SchemaValidationError(jsonschema.ValidationError):
    """A wrapper for jsonschema.ValidationError with friendlier traceback"""

    def __init__(self, obj: "SchemaBase", err: jsonschema.ValidationError) -> None:
        """Wrap `err`, remembering the offending schema object `obj` and
        rewriting the message into a user-friendly multi-error summary."""
        super().__init__(**err._contents())
        self.obj = obj
        # Grouped errors attached by validate_jsonschema; fall back to a
        # single-entry mapping when `err` was raised without that attribute.
        self._errors: GroupedValidationErrors = getattr(
            err, "_all_errors", {getattr(err, "json_path", _json_path(err)): [err]}
        )
        # This is the message from err
        self._original_message = self.message
        self.message = self._get_message()

    def __str__(self) -> str:
        return self.message

    def _get_message(self) -> str:
        """Build the final user-facing message from the grouped errors."""
        def indent_second_line_onwards(message: str, indent: int = 4) -> str:
            modified_lines: List[str] = []
            for idx, line in enumerate(message.split("\n")):
                if idx > 0 and len(line) > 0:
                    line = " " * indent + line
                modified_lines.append(line)
            return "\n".join(modified_lines)

        error_messages: List[str] = []
        # Only show a maximum of 3 errors as else the final message returned by this
        # method could get very long.
        for errors in list(self._errors.values())[:3]:
            error_messages.append(self._get_message_for_errors_group(errors))

        message = ""
        if len(error_messages) > 1:
            error_messages = [
                indent_second_line_onwards(f"Error {error_id}: {m}")
                for error_id, m in enumerate(error_messages, start=1)
            ]
            message += "Multiple errors were found.\n\n"
        message += "\n\n".join(error_messages)
        return message

    def _get_message_for_errors_group(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Format one json-path group of errors into a message fragment."""
        if errors[0].validator == "additionalProperties":
            # During development, we only found cases where an additionalProperties
            # error was raised if that was the only error for the offending instance
            # as identifiable by the json path. Therefore, we just check here the first
            # error. However, other constellations might exist in which case
            # this should be adapted so that other error messages are shown as well.
            message = self._get_additional_properties_error_message(errors[0])
        else:
            message = self._get_default_error_message(errors=errors)

        return message.strip()

    def _get_additional_properties_error_message(
        self,
        error: jsonschema.exceptions.ValidationError,
    ) -> str:
        """Output all existing parameters when an unknown parameter is specified."""
        altair_cls = self._get_altair_class_for_error(error)
        param_dict_keys = inspect.signature(altair_cls).parameters.keys()
        param_names_table = self._format_params_as_table(param_dict_keys)

        # Error messages for these errors look like this:
        # "Additional properties are not allowed ('unknown' was unexpected)"
        # Line below extracts "unknown" from this string
        parameter_name = error.message.split("('")[-1].split("'")[0]
        message = f"""\
`{altair_cls.__name__}` has no parameter named '{parameter_name}'

Existing parameter names are:
{param_names_table}
See the help for `{altair_cls.__name__}` to read the full description of these parameters"""
        return message

    def _get_altair_class_for_error(
        self, error: jsonschema.exceptions.ValidationError
    ) -> Type["SchemaBase"]:
        """Try to get the lowest class possible in the chart hierarchy so
        it can be displayed in the error message. This should lead to more informative
        error messages pointing the user closer to the source of the issue.
        """
        for prop_name in reversed(error.absolute_path):
            # Check if str as e.g. first item can be a 0
            if isinstance(prop_name, str):
                # Capitalize the property name so it matches Vega-Lite class
                # naming (e.g. "tooltip" -> "Tooltip").
                potential_class_name = prop_name[0].upper() + prop_name[1:]
                cls = getattr(vegalite, potential_class_name, None)
                if cls is not None:
                    break
        else:
            # Did not find a suitable class based on traversing the path so we fall
            # back on the class of the top-level object which created
            # the SchemaValidationError
            cls = self.obj.__class__
        return cls

    @staticmethod
    def _format_params_as_table(param_dict_keys: Iterable[str]) -> str:
        """Format param names into a table so that they are easier to read"""
        param_names: Tuple[str, ...]
        name_lengths: Tuple[int, ...]
        param_names, name_lengths = zip(  # type: ignore[assignment] # Mypy does think it's Tuple[Any]
            *[
                (name, len(name))
                for name in param_dict_keys
                if name not in ["kwds", "self"]
            ]
        )
        # Worst case scenario with the same longest param name in the same
        # row for all columns
        max_name_length = max(name_lengths)
        max_column_width = 80
        # Output a square table if not too big (since it is easier to read)
        num_param_names = len(param_names)
        square_columns = int(np.ceil(num_param_names**0.5))
        columns = min(max_column_width // max_name_length, square_columns)

        # Compute roughly equal column heights to evenly divide the param names
        def split_into_equal_parts(n: int, p: int) -> List[int]:
            return [n // p + 1] * (n % p) + [n // p] * (p - n % p)

        column_heights = split_into_equal_parts(num_param_names, columns)

        # Section the param names into columns and compute their widths
        param_names_columns: List[Tuple[str, ...]] = []
        column_max_widths: List[int] = []
        last_end_idx: int = 0
        for ch in column_heights:
            param_names_columns.append(param_names[last_end_idx : last_end_idx + ch])
            column_max_widths.append(
                max([len(param_name) for param_name in param_names_columns[-1]])
            )
            last_end_idx = ch + last_end_idx

        # Transpose the param name columns into rows to facilitate looping
        param_names_rows: List[Tuple[str, ...]] = []
        for li in zip_longest(*param_names_columns, fillvalue=""):
            param_names_rows.append(li)
        # Build the table as a string by iterating over and formatting the rows
        param_names_table: str = ""
        for param_names_row in param_names_rows:
            for num, param_name in enumerate(param_names_row):
                # Set column width based on the longest param in the column
                max_name_length_column = column_max_widths[num]
                column_pad = 3
                param_names_table += "{:<{}}".format(
                    param_name, max_name_length_column + column_pad
                )
                # Insert newlines and spacing after the last element in each row
                if num == (len(param_names_row) - 1):
                    param_names_table += "\n"
        return param_names_table

    def _get_default_error_message(
        self,
        errors: ValidationErrorList,
    ) -> str:
        """Generic message: summarize the invalid value plus bullet points of
        the valid alternatives collected from enum/type errors."""
        bullet_points: List[str] = []
        errors_by_validator = _group_errors_by_validator(errors)
        if "enum" in errors_by_validator:
            for error in errors_by_validator["enum"]:
                bullet_points.append(f"one of {error.validator_value}")

        if "type" in errors_by_validator:
            types = [f"'{err.validator_value}'" for err in errors_by_validator["type"]]
            point = "of type "
            if len(types) == 1:
                point += types[0]
            elif len(types) == 2:
                point += f"{types[0]} or {types[1]}"
            else:
                point += ", ".join(types[:-1]) + f", or {types[-1]}"
            bullet_points.append(point)

        # It should not matter which error is specifically used as they are all
        # about the same offending instance (i.e. invalid value), so we can just
        # take the first one
        error = errors[0]
        # Add a summary line when parameters are passed an invalid value
        # For example: "'asdf' is an invalid value for `stack`
        message = f"'{error.instance}' is an invalid value"
        if error.absolute_path:
            message += f" for `{error.absolute_path[-1]}`"

        # Add bullet points
        if len(bullet_points) == 0:
            message += ".\n\n"
        elif len(bullet_points) == 1:
            message += f". Valid values are {bullet_points[0]}.\n\n"
        else:
            # We don't use .capitalize below to make the first letter uppercase
            # as that makes the rest of the message lowercase
            bullet_points = [point[0].upper() + point[1:] for point in bullet_points]
            message += ". Valid values are:\n\n"
            message += "\n".join([f"- {point}" for point in bullet_points])
            message += "\n\n"

        # Add unformatted messages of any remaining errors which were not
        # considered so far. This is not expected to be used but more exists
        # as a fallback for cases which were not known during development.
        # NOTE(review): this loop rebinds `errors`, shadowing the parameter.
        for validator, errors in errors_by_validator.items():
            if validator not in ("enum", "type"):
                message += "\n".join([e.message for e in errors])

        return message
|
568 |
-
|
569 |
-
|
570 |
-
class UndefinedType:
|
571 |
-
"""A singleton object for marking undefined parameters"""
|
572 |
-
|
573 |
-
__instance = None
|
574 |
-
|
575 |
-
def __new__(cls, *args, **kwargs):
|
576 |
-
if not isinstance(cls.__instance, cls):
|
577 |
-
cls.__instance = object.__new__(cls, *args, **kwargs)
|
578 |
-
return cls.__instance
|
579 |
-
|
580 |
-
def __repr__(self):
|
581 |
-
return "Undefined"
|
582 |
-
|
583 |
-
|
584 |
-
# In the future Altair may implement a more complete set of type hints.
|
585 |
-
# But for now, we'll add an annotation to indicate that the type checker
|
586 |
-
# should permit any value passed to a function argument whose default
|
587 |
-
# value is Undefined.
|
588 |
-
Undefined: Any = UndefinedType()
|
589 |
-
|
590 |
-
|
591 |
-
class SchemaBase:
|
592 |
-
"""Base class for schema wrappers.
|
593 |
-
|
594 |
-
Each derived class should set the _schema class attribute (and optionally
|
595 |
-
the _rootschema class attribute) which is used for validation.
|
596 |
-
"""
|
597 |
-
|
598 |
-
_schema: Optional[Dict[str, Any]] = None
|
599 |
-
_rootschema: Optional[Dict[str, Any]] = None
|
600 |
-
_class_is_valid_at_instantiation = True
|
601 |
-
|
602 |
-
def __init__(self, *args, **kwds):
|
603 |
-
# Two valid options for initialization, which should be handled by
|
604 |
-
# derived classes:
|
605 |
-
# - a single arg with no kwds, for, e.g. {'type': 'string'}
|
606 |
-
# - zero args with zero or more kwds for {'type': 'object'}
|
607 |
-
if self._schema is None:
|
608 |
-
raise ValueError(
|
609 |
-
"Cannot instantiate object of type {}: "
|
610 |
-
"_schema class attribute is not defined."
|
611 |
-
"".format(self.__class__)
|
612 |
-
)
|
613 |
-
|
614 |
-
if kwds:
|
615 |
-
assert len(args) == 0
|
616 |
-
else:
|
617 |
-
assert len(args) in [0, 1]
|
618 |
-
|
619 |
-
# use object.__setattr__ because we override setattr below.
|
620 |
-
object.__setattr__(self, "_args", args)
|
621 |
-
object.__setattr__(self, "_kwds", kwds)
|
622 |
-
|
623 |
-
if DEBUG_MODE and self._class_is_valid_at_instantiation:
|
624 |
-
self.to_dict(validate=True)
|
625 |
-
|
626 |
-
def copy(self, deep=True, ignore=()):
|
627 |
-
"""Return a copy of the object
|
628 |
-
|
629 |
-
Parameters
|
630 |
-
----------
|
631 |
-
deep : boolean or list, optional
|
632 |
-
If True (default) then return a deep copy of all dict, list, and
|
633 |
-
SchemaBase objects within the object structure.
|
634 |
-
If False, then only copy the top object.
|
635 |
-
If a list or iterable, then only copy the listed attributes.
|
636 |
-
ignore : list, optional
|
637 |
-
A list of keys for which the contents should not be copied, but
|
638 |
-
only stored by reference.
|
639 |
-
"""
|
640 |
-
|
641 |
-
def _shallow_copy(obj):
|
642 |
-
if isinstance(obj, SchemaBase):
|
643 |
-
return obj.copy(deep=False)
|
644 |
-
elif isinstance(obj, list):
|
645 |
-
return obj[:]
|
646 |
-
elif isinstance(obj, dict):
|
647 |
-
return obj.copy()
|
648 |
-
else:
|
649 |
-
return obj
|
650 |
-
|
651 |
-
def _deep_copy(obj, ignore=()):
|
652 |
-
if isinstance(obj, SchemaBase):
|
653 |
-
args = tuple(_deep_copy(arg) for arg in obj._args)
|
654 |
-
kwds = {
|
655 |
-
k: (_deep_copy(v, ignore=ignore) if k not in ignore else v)
|
656 |
-
for k, v in obj._kwds.items()
|
657 |
-
}
|
658 |
-
with debug_mode(False):
|
659 |
-
return obj.__class__(*args, **kwds)
|
660 |
-
elif isinstance(obj, list):
|
661 |
-
return [_deep_copy(v, ignore=ignore) for v in obj]
|
662 |
-
elif isinstance(obj, dict):
|
663 |
-
return {
|
664 |
-
k: (_deep_copy(v, ignore=ignore) if k not in ignore else v)
|
665 |
-
for k, v in obj.items()
|
666 |
-
}
|
667 |
-
else:
|
668 |
-
return obj
|
669 |
-
|
670 |
-
try:
|
671 |
-
deep = list(deep)
|
672 |
-
except TypeError:
|
673 |
-
deep_is_list = False
|
674 |
-
else:
|
675 |
-
deep_is_list = True
|
676 |
-
|
677 |
-
if deep and not deep_is_list:
|
678 |
-
return _deep_copy(self, ignore=ignore)
|
679 |
-
|
680 |
-
with debug_mode(False):
|
681 |
-
copy = self.__class__(*self._args, **self._kwds)
|
682 |
-
if deep_is_list:
|
683 |
-
for attr in deep:
|
684 |
-
copy[attr] = _shallow_copy(copy._get(attr))
|
685 |
-
return copy
|
686 |
-
|
687 |
-
def _get(self, attr, default=Undefined):
|
688 |
-
"""Get an attribute, returning default if not present."""
|
689 |
-
attr = self._kwds.get(attr, Undefined)
|
690 |
-
if attr is Undefined:
|
691 |
-
attr = default
|
692 |
-
return attr
|
693 |
-
|
694 |
-
def __getattr__(self, attr):
|
695 |
-
# reminder: getattr is called after the normal lookups
|
696 |
-
if attr == "_kwds":
|
697 |
-
raise AttributeError()
|
698 |
-
if attr in self._kwds:
|
699 |
-
return self._kwds[attr]
|
700 |
-
else:
|
701 |
-
try:
|
702 |
-
_getattr = super(SchemaBase, self).__getattr__
|
703 |
-
except AttributeError:
|
704 |
-
_getattr = super(SchemaBase, self).__getattribute__
|
705 |
-
return _getattr(attr)
|
706 |
-
|
707 |
-
def __setattr__(self, item, val):
|
708 |
-
self._kwds[item] = val
|
709 |
-
|
710 |
-
def __getitem__(self, item):
|
711 |
-
return self._kwds[item]
|
712 |
-
|
713 |
-
def __setitem__(self, item, val):
|
714 |
-
self._kwds[item] = val
|
715 |
-
|
716 |
-
def __repr__(self):
|
717 |
-
if self._kwds:
|
718 |
-
args = (
|
719 |
-
"{}: {!r}".format(key, val)
|
720 |
-
for key, val in sorted(self._kwds.items())
|
721 |
-
if val is not Undefined
|
722 |
-
)
|
723 |
-
args = "\n" + ",\n".join(args)
|
724 |
-
return "{0}({{{1}\n}})".format(
|
725 |
-
self.__class__.__name__, args.replace("\n", "\n ")
|
726 |
-
)
|
727 |
-
else:
|
728 |
-
return "{}({!r})".format(self.__class__.__name__, self._args[0])
|
729 |
-
|
730 |
-
def __eq__(self, other):
|
731 |
-
return (
|
732 |
-
type(self) is type(other)
|
733 |
-
and self._args == other._args
|
734 |
-
and self._kwds == other._kwds
|
735 |
-
)
|
736 |
-
|
737 |
-
def to_dict(self, validate=True, ignore=None, context=None):
|
738 |
-
"""Return a dictionary representation of the object
|
739 |
-
|
740 |
-
Parameters
|
741 |
-
----------
|
742 |
-
validate : boolean
|
743 |
-
If True (default), then validate the output dictionary
|
744 |
-
against the schema.
|
745 |
-
ignore : list
|
746 |
-
A list of keys to ignore. This will *not* passed to child to_dict
|
747 |
-
function calls.
|
748 |
-
context : dict (optional)
|
749 |
-
A context dictionary that will be passed to all child to_dict
|
750 |
-
function calls
|
751 |
-
|
752 |
-
Returns
|
753 |
-
-------
|
754 |
-
dct : dictionary
|
755 |
-
The dictionary representation of this object
|
756 |
-
|
757 |
-
Raises
|
758 |
-
------
|
759 |
-
jsonschema.ValidationError :
|
760 |
-
if validate=True and the dict does not conform to the schema
|
761 |
-
"""
|
762 |
-
if context is None:
|
763 |
-
context = {}
|
764 |
-
if ignore is None:
|
765 |
-
ignore = []
|
766 |
-
|
767 |
-
if self._args and not self._kwds:
|
768 |
-
result = _todict(self._args[0], context=context)
|
769 |
-
elif not self._args:
|
770 |
-
kwds = self._kwds.copy()
|
771 |
-
# parsed_shorthand is added by FieldChannelMixin.
|
772 |
-
# It's used below to replace shorthand with its long form equivalent
|
773 |
-
# parsed_shorthand is removed from context if it exists so that it is
|
774 |
-
# not passed to child to_dict function calls
|
775 |
-
parsed_shorthand = context.pop("parsed_shorthand", {})
|
776 |
-
# Prevent that pandas categorical data is automatically sorted
|
777 |
-
# when a non-ordinal data type is specifed manually
|
778 |
-
# or if the encoding channel does not support sorting
|
779 |
-
if "sort" in parsed_shorthand and (
|
780 |
-
"sort" not in kwds or kwds["type"] not in ["ordinal", Undefined]
|
781 |
-
):
|
782 |
-
parsed_shorthand.pop("sort")
|
783 |
-
|
784 |
-
kwds.update(
|
785 |
-
{
|
786 |
-
k: v
|
787 |
-
for k, v in parsed_shorthand.items()
|
788 |
-
if kwds.get(k, Undefined) is Undefined
|
789 |
-
}
|
790 |
-
)
|
791 |
-
kwds = {
|
792 |
-
k: v for k, v in kwds.items() if k not in list(ignore) + ["shorthand"]
|
793 |
-
}
|
794 |
-
if "mark" in kwds and isinstance(kwds["mark"], str):
|
795 |
-
kwds["mark"] = {"type": kwds["mark"]}
|
796 |
-
result = _todict(
|
797 |
-
kwds,
|
798 |
-
context=context,
|
799 |
-
)
|
800 |
-
else:
|
801 |
-
raise ValueError(
|
802 |
-
"{} instance has both a value and properties : "
|
803 |
-
"cannot serialize to dict".format(self.__class__)
|
804 |
-
)
|
805 |
-
if validate:
|
806 |
-
try:
|
807 |
-
self.validate(result)
|
808 |
-
except jsonschema.ValidationError as err:
|
809 |
-
# We do not raise `from err` as else the resulting
|
810 |
-
# traceback is very long as it contains part
|
811 |
-
# of the Vega-Lite schema. It would also first
|
812 |
-
# show the less helpful ValidationError instead of
|
813 |
-
# the more user friendly SchemaValidationError
|
814 |
-
raise SchemaValidationError(self, err) from None
|
815 |
-
return result
|
816 |
-
|
817 |
-
def to_json(
|
818 |
-
self,
|
819 |
-
validate=True,
|
820 |
-
ignore=None,
|
821 |
-
context=None,
|
822 |
-
indent=2,
|
823 |
-
sort_keys=True,
|
824 |
-
**kwargs,
|
825 |
-
):
|
826 |
-
"""Emit the JSON representation for this object as a string.
|
827 |
-
|
828 |
-
Parameters
|
829 |
-
----------
|
830 |
-
validate : boolean
|
831 |
-
If True (default), then validate the output dictionary
|
832 |
-
against the schema.
|
833 |
-
ignore : list (optional)
|
834 |
-
A list of keys to ignore. This will *not* passed to child to_dict
|
835 |
-
function calls.
|
836 |
-
context : dict (optional)
|
837 |
-
A context dictionary that will be passed to all child to_dict
|
838 |
-
function calls
|
839 |
-
indent : integer, default 2
|
840 |
-
the number of spaces of indentation to use
|
841 |
-
sort_keys : boolean, default True
|
842 |
-
if True, sort keys in the output
|
843 |
-
**kwargs
|
844 |
-
Additional keyword arguments are passed to ``json.dumps()``
|
845 |
-
|
846 |
-
Returns
|
847 |
-
-------
|
848 |
-
spec : string
|
849 |
-
The JSON specification of the chart object.
|
850 |
-
"""
|
851 |
-
if ignore is None:
|
852 |
-
ignore = []
|
853 |
-
if context is None:
|
854 |
-
context = {}
|
855 |
-
dct = self.to_dict(validate=validate, ignore=ignore, context=context)
|
856 |
-
return json.dumps(dct, indent=indent, sort_keys=sort_keys, **kwargs)
|
857 |
-
|
858 |
-
@classmethod
|
859 |
-
def _default_wrapper_classes(cls):
|
860 |
-
"""Return the set of classes used within cls.from_dict()"""
|
861 |
-
return _subclasses(SchemaBase)
|
862 |
-
|
863 |
-
@classmethod
|
864 |
-
def from_dict(cls, dct, validate=True, _wrapper_classes=None):
|
865 |
-
"""Construct class from a dictionary representation
|
866 |
-
|
867 |
-
Parameters
|
868 |
-
----------
|
869 |
-
dct : dictionary
|
870 |
-
The dict from which to construct the class
|
871 |
-
validate : boolean
|
872 |
-
If True (default), then validate the input against the schema.
|
873 |
-
_wrapper_classes : list (optional)
|
874 |
-
The set of SchemaBase classes to use when constructing wrappers
|
875 |
-
of the dict inputs. If not specified, the result of
|
876 |
-
cls._default_wrapper_classes will be used.
|
877 |
-
|
878 |
-
Returns
|
879 |
-
-------
|
880 |
-
obj : Schema object
|
881 |
-
The wrapped schema
|
882 |
-
|
883 |
-
Raises
|
884 |
-
------
|
885 |
-
jsonschema.ValidationError :
|
886 |
-
if validate=True and dct does not conform to the schema
|
887 |
-
"""
|
888 |
-
if validate:
|
889 |
-
cls.validate(dct)
|
890 |
-
if _wrapper_classes is None:
|
891 |
-
_wrapper_classes = cls._default_wrapper_classes()
|
892 |
-
converter = _FromDict(_wrapper_classes)
|
893 |
-
return converter.from_dict(dct, cls)
|
894 |
-
|
895 |
-
@classmethod
|
896 |
-
def from_json(cls, json_string, validate=True, **kwargs):
|
897 |
-
"""Instantiate the object from a valid JSON string
|
898 |
-
|
899 |
-
Parameters
|
900 |
-
----------
|
901 |
-
json_string : string
|
902 |
-
The string containing a valid JSON chart specification.
|
903 |
-
validate : boolean
|
904 |
-
If True (default), then validate the input against the schema.
|
905 |
-
**kwargs :
|
906 |
-
Additional keyword arguments are passed to json.loads
|
907 |
-
|
908 |
-
Returns
|
909 |
-
-------
|
910 |
-
chart : Chart object
|
911 |
-
The altair Chart object built from the specification.
|
912 |
-
"""
|
913 |
-
dct = json.loads(json_string, **kwargs)
|
914 |
-
return cls.from_dict(dct, validate=validate)
|
915 |
-
|
916 |
-
@classmethod
|
917 |
-
def validate(cls, instance, schema=None):
|
918 |
-
"""
|
919 |
-
Validate the instance against the class schema in the context of the
|
920 |
-
rootschema.
|
921 |
-
"""
|
922 |
-
if schema is None:
|
923 |
-
schema = cls._schema
|
924 |
-
return validate_jsonschema(
|
925 |
-
instance, schema, rootschema=cls._rootschema or cls._schema
|
926 |
-
)
|
927 |
-
|
928 |
-
@classmethod
|
929 |
-
def resolve_references(cls, schema=None):
|
930 |
-
"""Resolve references in the context of this object's schema or root schema."""
|
931 |
-
return _resolve_references(
|
932 |
-
schema=(schema or cls._schema),
|
933 |
-
root=(cls._rootschema or cls._schema or schema),
|
934 |
-
)
|
935 |
-
|
936 |
-
@classmethod
|
937 |
-
def validate_property(cls, name, value, schema=None):
|
938 |
-
"""
|
939 |
-
Validate a property against property schema in the context of the
|
940 |
-
rootschema
|
941 |
-
"""
|
942 |
-
value = _todict(value, context={})
|
943 |
-
props = cls.resolve_references(schema or cls._schema).get("properties", {})
|
944 |
-
return validate_jsonschema(
|
945 |
-
value, props.get(name, {}), rootschema=cls._rootschema or cls._schema
|
946 |
-
)
|
947 |
-
|
948 |
-
def __dir__(self):
|
949 |
-
return sorted(super().__dir__() + list(self._kwds.keys()))
|
950 |
-
|
951 |
-
|
952 |
-
def _passthrough(*args, **kwds):
|
953 |
-
return args[0] if args else kwds
|
954 |
-
|
955 |
-
|
956 |
-
class _FromDict:
|
957 |
-
"""Class used to construct SchemaBase class hierarchies from a dict
|
958 |
-
|
959 |
-
The primary purpose of using this class is to be able to build a hash table
|
960 |
-
that maps schemas to their wrapper classes. The candidate classes are
|
961 |
-
specified in the ``class_list`` argument to the constructor.
|
962 |
-
"""
|
963 |
-
|
964 |
-
_hash_exclude_keys = ("definitions", "title", "description", "$schema", "id")
|
965 |
-
|
966 |
-
def __init__(self, class_list):
|
967 |
-
# Create a mapping of a schema hash to a list of matching classes
|
968 |
-
# This lets us quickly determine the correct class to construct
|
969 |
-
self.class_dict = collections.defaultdict(list)
|
970 |
-
for cls in class_list:
|
971 |
-
if cls._schema is not None:
|
972 |
-
self.class_dict[self.hash_schema(cls._schema)].append(cls)
|
973 |
-
|
974 |
-
@classmethod
|
975 |
-
def hash_schema(cls, schema, use_json=True):
|
976 |
-
"""
|
977 |
-
Compute a python hash for a nested dictionary which
|
978 |
-
properly handles dicts, lists, sets, and tuples.
|
979 |
-
|
980 |
-
At the top level, the function excludes from the hashed schema all keys
|
981 |
-
listed in `exclude_keys`.
|
982 |
-
|
983 |
-
This implements two methods: one based on conversion to JSON, and one based
|
984 |
-
on recursive conversions of unhashable to hashable types; the former seems
|
985 |
-
to be slightly faster in several benchmarks.
|
986 |
-
"""
|
987 |
-
if cls._hash_exclude_keys and isinstance(schema, dict):
|
988 |
-
schema = {
|
989 |
-
key: val
|
990 |
-
for key, val in schema.items()
|
991 |
-
if key not in cls._hash_exclude_keys
|
992 |
-
}
|
993 |
-
if use_json:
|
994 |
-
s = json.dumps(schema, sort_keys=True)
|
995 |
-
return hash(s)
|
996 |
-
else:
|
997 |
-
|
998 |
-
def _freeze(val):
|
999 |
-
if isinstance(val, dict):
|
1000 |
-
return frozenset((k, _freeze(v)) for k, v in val.items())
|
1001 |
-
elif isinstance(val, set):
|
1002 |
-
return frozenset(map(_freeze, val))
|
1003 |
-
elif isinstance(val, list) or isinstance(val, tuple):
|
1004 |
-
return tuple(map(_freeze, val))
|
1005 |
-
else:
|
1006 |
-
return val
|
1007 |
-
|
1008 |
-
return hash(_freeze(schema))
|
1009 |
-
|
1010 |
-
def from_dict(
|
1011 |
-
self, dct, cls=None, schema=None, rootschema=None, default_class=_passthrough
|
1012 |
-
):
|
1013 |
-
"""Construct an object from a dict representation"""
|
1014 |
-
if (schema is None) == (cls is None):
|
1015 |
-
raise ValueError("Must provide either cls or schema, but not both.")
|
1016 |
-
if schema is None:
|
1017 |
-
schema = schema or cls._schema
|
1018 |
-
rootschema = rootschema or cls._rootschema
|
1019 |
-
rootschema = rootschema or schema
|
1020 |
-
|
1021 |
-
if isinstance(dct, SchemaBase):
|
1022 |
-
return dct
|
1023 |
-
|
1024 |
-
if cls is None:
|
1025 |
-
# If there are multiple matches, we use the first one in the dict.
|
1026 |
-
# Our class dict is constructed breadth-first from top to bottom,
|
1027 |
-
# so the first class that matches is the most general match.
|
1028 |
-
matches = self.class_dict[self.hash_schema(schema)]
|
1029 |
-
if matches:
|
1030 |
-
cls = matches[0]
|
1031 |
-
else:
|
1032 |
-
cls = default_class
|
1033 |
-
schema = _resolve_references(schema, rootschema)
|
1034 |
-
|
1035 |
-
if "anyOf" in schema or "oneOf" in schema:
|
1036 |
-
schemas = schema.get("anyOf", []) + schema.get("oneOf", [])
|
1037 |
-
for possible_schema in schemas:
|
1038 |
-
try:
|
1039 |
-
validate_jsonschema(dct, possible_schema, rootschema=rootschema)
|
1040 |
-
except jsonschema.ValidationError:
|
1041 |
-
continue
|
1042 |
-
else:
|
1043 |
-
return self.from_dict(
|
1044 |
-
dct,
|
1045 |
-
schema=possible_schema,
|
1046 |
-
rootschema=rootschema,
|
1047 |
-
default_class=cls,
|
1048 |
-
)
|
1049 |
-
|
1050 |
-
if isinstance(dct, dict):
|
1051 |
-
# TODO: handle schemas for additionalProperties/patternProperties
|
1052 |
-
props = schema.get("properties", {})
|
1053 |
-
kwds = {}
|
1054 |
-
for key, val in dct.items():
|
1055 |
-
if key in props:
|
1056 |
-
val = self.from_dict(val, schema=props[key], rootschema=rootschema)
|
1057 |
-
kwds[key] = val
|
1058 |
-
return cls(**kwds)
|
1059 |
-
|
1060 |
-
elif isinstance(dct, list):
|
1061 |
-
item_schema = schema.get("items", {})
|
1062 |
-
dct = [
|
1063 |
-
self.from_dict(val, schema=item_schema, rootschema=rootschema)
|
1064 |
-
for val in dct
|
1065 |
-
]
|
1066 |
-
return cls(dct)
|
1067 |
-
else:
|
1068 |
-
return cls(dct)
|
1069 |
-
|
1070 |
-
|
1071 |
-
class _PropertySetter:
|
1072 |
-
def __init__(self, prop, schema):
|
1073 |
-
self.prop = prop
|
1074 |
-
self.schema = schema
|
1075 |
-
|
1076 |
-
def __get__(self, obj, cls):
|
1077 |
-
self.obj = obj
|
1078 |
-
self.cls = cls
|
1079 |
-
# The docs from the encoding class parameter (e.g. `bin` in X, Color,
|
1080 |
-
# etc); this provides a general description of the parameter.
|
1081 |
-
self.__doc__ = self.schema["description"].replace("__", "**")
|
1082 |
-
property_name = f"{self.prop}"[0].upper() + f"{self.prop}"[1:]
|
1083 |
-
if hasattr(vegalite, property_name):
|
1084 |
-
altair_prop = getattr(vegalite, property_name)
|
1085 |
-
# Add the docstring from the helper class (e.g. `BinParams`) so
|
1086 |
-
# that all the parameter names of the helper class are included in
|
1087 |
-
# the final docstring
|
1088 |
-
parameter_index = altair_prop.__doc__.find("Parameters\n")
|
1089 |
-
if parameter_index > -1:
|
1090 |
-
self.__doc__ = (
|
1091 |
-
altair_prop.__doc__[:parameter_index].replace(" ", "")
|
1092 |
-
+ self.__doc__
|
1093 |
-
+ textwrap.dedent(
|
1094 |
-
f"\n\n {altair_prop.__doc__[parameter_index:]}"
|
1095 |
-
)
|
1096 |
-
)
|
1097 |
-
# For short docstrings such as Aggregate, Stack, et
|
1098 |
-
else:
|
1099 |
-
self.__doc__ = (
|
1100 |
-
altair_prop.__doc__.replace(" ", "") + "\n" + self.__doc__
|
1101 |
-
)
|
1102 |
-
# Add signatures and tab completion for the method and parameter names
|
1103 |
-
self.__signature__ = inspect.signature(altair_prop)
|
1104 |
-
self.__wrapped__ = inspect.getfullargspec(altair_prop)
|
1105 |
-
self.__name__ = altair_prop.__name__
|
1106 |
-
else:
|
1107 |
-
# It seems like bandPosition is the only parameter that doesn't
|
1108 |
-
# have a helper class.
|
1109 |
-
pass
|
1110 |
-
return self
|
1111 |
-
|
1112 |
-
def __call__(self, *args, **kwargs):
|
1113 |
-
obj = self.obj.copy()
|
1114 |
-
# TODO: use schema to validate
|
1115 |
-
obj[self.prop] = args[0] if args else kwargs
|
1116 |
-
return obj
|
1117 |
-
|
1118 |
-
|
1119 |
-
def with_property_setters(cls):
|
1120 |
-
"""
|
1121 |
-
Decorator to add property setters to a Schema class.
|
1122 |
-
"""
|
1123 |
-
schema = cls.resolve_references()
|
1124 |
-
for prop, propschema in schema.get("properties", {}).items():
|
1125 |
-
setattr(cls, prop, _PropertySetter(prop, propschema))
|
1126 |
-
return cls
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Datasculptor/StyleGAN-NADA/e4e/configs/paths_config.py
DELETED
@@ -1,28 +0,0 @@
|
|
1 |
-
dataset_paths = {
|
2 |
-
# Face Datasets (In the paper: FFHQ - train, CelebAHQ - test)
|
3 |
-
'ffhq': '',
|
4 |
-
'celeba_test': '',
|
5 |
-
|
6 |
-
# Cars Dataset (In the paper: Stanford cars)
|
7 |
-
'cars_train': '',
|
8 |
-
'cars_test': '',
|
9 |
-
|
10 |
-
# Horse Dataset (In the paper: LSUN Horse)
|
11 |
-
'horse_train': '',
|
12 |
-
'horse_test': '',
|
13 |
-
|
14 |
-
# Church Dataset (In the paper: LSUN Church)
|
15 |
-
'church_train': '',
|
16 |
-
'church_test': '',
|
17 |
-
|
18 |
-
# Cats Dataset (In the paper: LSUN Cat)
|
19 |
-
'cats_train': '',
|
20 |
-
'cats_test': ''
|
21 |
-
}
|
22 |
-
|
23 |
-
model_paths = {
|
24 |
-
'stylegan_ffhq': 'pretrained_models/stylegan2-ffhq-config-f.pt',
|
25 |
-
'ir_se50': 'pretrained_models/model_ir_se50.pth',
|
26 |
-
'shape_predictor': 'pretrained_models/shape_predictor_68_face_landmarks.dat',
|
27 |
-
'moco': 'pretrained_models/moco_v2_800ep_pretrain.pth'
|
28 |
-
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/ECCV2022/PSG/OpenPSG/configs/_base_/models/detr4seg_r101_psg.py
DELETED
@@ -1,137 +0,0 @@
|
|
1 |
-
_base_ = [
|
2 |
-
'../_base_/models/detr4seg_r101.py', '../_base_/datasets/psg.py',
|
3 |
-
'../_base_/custom_runtime.py'
|
4 |
-
]
|
5 |
-
|
6 |
-
custom_imports = dict(imports=[
|
7 |
-
'openpsg.models.frameworks.detr4seg',
|
8 |
-
'openpsg.models.relation_heads.detr4seg_head', 'openpsg.datasets',
|
9 |
-
'openpsg.datasets.pipelines.loading',
|
10 |
-
'openpsg.datasets.pipelines.rel_randomcrop',
|
11 |
-
'openpsg.models.relation_heads.approaches.matcher',
|
12 |
-
'openpsg.models.losses.seg_losses'
|
13 |
-
],
|
14 |
-
allow_failed_imports=False)
|
15 |
-
|
16 |
-
object_classes = [
|
17 |
-
'person', 'bicycle', 'car', 'motorcycle', 'airplane', 'bus', 'train',
|
18 |
-
'truck', 'boat', 'traffic light', 'fire hydrant', 'stop sign',
|
19 |
-
'parking meter', 'bench', 'bird', 'cat', 'dog', 'horse', 'sheep', 'cow',
|
20 |
-
'elephant', 'bear', 'zebra', 'giraffe', 'backpack', 'umbrella', 'handbag',
|
21 |
-
'tie', 'suitcase', 'frisbee', 'skis', 'snowboard', 'sports ball', 'kite',
|
22 |
-
'baseball bat', 'baseball glove', 'skateboard', 'surfboard',
|
23 |
-
'tennis racket', 'bottle', 'wine glass', 'cup', 'fork', 'knife', 'spoon',
|
24 |
-
'bowl', 'banana', 'apple', 'sandwich', 'orange', 'broccoli', 'carrot',
|
25 |
-
'hot dog', 'pizza', 'donut', 'cake', 'chair', 'couch', 'potted plant',
|
26 |
-
'bed', 'dining table', 'toilet', 'tv', 'laptop', 'mouse', 'remote',
|
27 |
-
'keyboard', 'cell phone', 'microwave', 'oven', 'toaster', 'sink',
|
28 |
-
'refrigerator', 'book', 'clock', 'vase', 'scissors', 'teddy bear',
|
29 |
-
'hair drier', 'toothbrush', 'banner', 'blanket', 'bridge', 'cardboard',
|
30 |
-
'counter', 'curtain', 'door-stuff', 'floor-wood', 'flower', 'fruit',
|
31 |
-
'gravel', 'house', 'light', 'mirror-stuff', 'net', 'pillow', 'platform',
|
32 |
-
'playingfield', 'railroad', 'river', 'road', 'roof', 'sand', 'sea',
|
33 |
-
'shelf', 'snow', 'stairs', 'tent', 'towel', 'wall-brick', 'wall-stone',
|
34 |
-
'wall-tile', 'wall-wood', 'water-other', 'window-blind', 'window-other',
|
35 |
-
'tree-merged', 'fence-merged', 'ceiling-merged', 'sky-other-merged',
|
36 |
-
'cabinet-merged', 'table-merged', 'floor-other-merged', 'pavement-merged',
|
37 |
-
'mountain-merged', 'grass-merged', 'dirt-merged', 'paper-merged',
|
38 |
-
'food-other-merged', 'building-other-merged', 'rock-merged',
|
39 |
-
'wall-other-merged', 'rug-merged'
|
40 |
-
]
|
41 |
-
|
42 |
-
model = dict(bbox_head=dict(
|
43 |
-
num_classes=len(object_classes),
|
44 |
-
object_classes=object_classes,
|
45 |
-
))
|
46 |
-
|
47 |
-
img_norm_cfg = dict(mean=[123.675, 116.28, 103.53],
|
48 |
-
std=[58.395, 57.12, 57.375],
|
49 |
-
to_rgb=True)
|
50 |
-
# train_pipeline, NOTE the img_scale and the Pad's size_divisor is different
|
51 |
-
# from the default setting in mmdet.
|
52 |
-
train_pipeline = [
|
53 |
-
dict(type='LoadImageFromFile'),
|
54 |
-
dict(type='LoadSceneGraphAnnotations', with_bbox=True, with_rel=True),
|
55 |
-
dict(type='RandomFlip', flip_ratio=0.5),
|
56 |
-
dict(
|
57 |
-
type='AutoAugment',
|
58 |
-
policies=[
|
59 |
-
[
|
60 |
-
dict(type='Resize',
|
61 |
-
img_scale=[(480, 1333), (512, 1333), (544, 1333),
|
62 |
-
(576, 1333), (608, 1333), (640, 1333),
|
63 |
-
(672, 1333), (704, 1333), (736, 1333),
|
64 |
-
(768, 1333), (800, 1333)],
|
65 |
-
multiscale_mode='value',
|
66 |
-
keep_ratio=True)
|
67 |
-
],
|
68 |
-
[
|
69 |
-
dict(type='Resize',
|
70 |
-
img_scale=[(400, 1333), (500, 1333), (600, 1333)],
|
71 |
-
multiscale_mode='value',
|
72 |
-
keep_ratio=True),
|
73 |
-
dict(type='RandomCrop',
|
74 |
-
crop_type='absolute_range',
|
75 |
-
crop_size=(384, 600),
|
76 |
-
allow_negative_crop=False), # no empty relations
|
77 |
-
dict(type='Resize',
|
78 |
-
img_scale=[(480, 1333), (512, 1333), (544, 1333),
|
79 |
-
(576, 1333), (608, 1333), (640, 1333),
|
80 |
-
(672, 1333), (704, 1333), (736, 1333),
|
81 |
-
(768, 1333), (800, 1333)],
|
82 |
-
multiscale_mode='value',
|
83 |
-
override=True,
|
84 |
-
keep_ratio=True)
|
85 |
-
]
|
86 |
-
]),
|
87 |
-
dict(type='Normalize', **img_norm_cfg),
|
88 |
-
dict(type='Pad', size_divisor=1),
|
89 |
-
dict(type='RelsFormatBundle'),
|
90 |
-
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels'])
|
91 |
-
]
|
92 |
-
# test_pipeline, NOTE the Pad's size_divisor is different from the default
|
93 |
-
# setting (size_divisor=32). While there is little effect on the performance
|
94 |
-
# whether we use the default setting or use size_divisor=1.
|
95 |
-
test_pipeline = [
|
96 |
-
dict(type='LoadImageFromFile'),
|
97 |
-
dict(type='MultiScaleFlipAug',
|
98 |
-
img_scale=(1333, 800),
|
99 |
-
flip=False,
|
100 |
-
transforms=[
|
101 |
-
dict(type='Resize', keep_ratio=True),
|
102 |
-
dict(type='RandomFlip'),
|
103 |
-
dict(type='Normalize', **img_norm_cfg),
|
104 |
-
dict(type='Pad', size_divisor=1),
|
105 |
-
dict(type='ImageToTensor', keys=['img']),
|
106 |
-
dict(type='Collect', keys=['img'])
|
107 |
-
])
|
108 |
-
]
|
109 |
-
data = dict(samples_per_gpu=2,
|
110 |
-
workers_per_gpu=2,
|
111 |
-
train=dict(pipeline=train_pipeline),
|
112 |
-
val=dict(pipeline=test_pipeline),
|
113 |
-
test=dict(pipeline=test_pipeline))
|
114 |
-
# optimizer
|
115 |
-
optimizer = dict(
|
116 |
-
type='AdamW',
|
117 |
-
lr=0.0001,
|
118 |
-
weight_decay=0.0001,
|
119 |
-
paramwise_cfg=dict(
|
120 |
-
custom_keys={'backbone': dict(lr_mult=0.1, decay_mult=1.0)}))
|
121 |
-
optimizer_config = dict(grad_clip=dict(max_norm=0.1, norm_type=2))
|
122 |
-
|
123 |
-
# learning policy
|
124 |
-
lr_config = dict(policy='step', step=110)
|
125 |
-
runner = dict(type='EpochBasedRunner', max_epochs=150)
|
126 |
-
|
127 |
-
project_name = 'detr4seg'
|
128 |
-
expt_name = 'detr4seg_r101_coco'
|
129 |
-
work_dir = f'./work_dirs/{expt_name}'
|
130 |
-
|
131 |
-
log_config = dict(
|
132 |
-
interval=50,
|
133 |
-
hooks=[dict(type='TextLoggerHook'),
|
134 |
-
dict(type='TensorboardLoggerHook')],
|
135 |
-
)
|
136 |
-
|
137 |
-
load_from = '/mnt/ssd/gzj/test/OpenPSG/detr_r50_fb_origin.pth'
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
spaces/Eddycrack864/Applio-Inference/infer/modules/train/preprocess.py
DELETED
@@ -1,147 +0,0 @@
|
|
1 |
-
import multiprocessing
|
2 |
-
import os
|
3 |
-
import sys
|
4 |
-
|
5 |
-
from scipy import signal
|
6 |
-
|
7 |
-
now_dir = os.getcwd()
|
8 |
-
sys.path.append(now_dir)
|
9 |
-
print(sys.argv)
|
10 |
-
inp_root = sys.argv[1]
|
11 |
-
sr = int(sys.argv[2])
|
12 |
-
n_p = int(sys.argv[3])
|
13 |
-
exp_dir = sys.argv[4]
|
14 |
-
noparallel = sys.argv[5] == "True"
|
15 |
-
per = float(sys.argv[6])
|
16 |
-
import multiprocessing
|
17 |
-
import os
|
18 |
-
import traceback
|
19 |
-
|
20 |
-
import librosa
|
21 |
-
import numpy as np
|
22 |
-
from scipy.io import wavfile
|
23 |
-
|
24 |
-
from infer.lib.audio import load_audio
|
25 |
-
from infer.lib.slicer2 import Slicer
|
26 |
-
|
27 |
-
mutex = multiprocessing.Lock()
|
28 |
-
f = open("%s/preprocess.log" % exp_dir, "a+")
|
29 |
-
|
30 |
-
|
31 |
-
def println(strr):
|
32 |
-
mutex.acquire()
|
33 |
-
print(strr)
|
34 |
-
f.write("%s\n" % strr)
|
35 |
-
f.flush()
|
36 |
-
mutex.release()
|
37 |
-
|
38 |
-
|
39 |
-
class PreProcess:
|
40 |
-
def __init__(self, sr, exp_dir, per=3.7):
|
41 |
-
self.slicer = Slicer(
|
42 |
-
sr=sr,
|
43 |
-
threshold=-42,
|
44 |
-
min_length=1500,
|
45 |
-
min_interval=400,
|
46 |
-
hop_size=15,
|
47 |
-
max_sil_kept=500,
|
48 |
-
)
|
49 |
-
self.sr = sr
|
50 |
-
self.bh, self.ah = signal.butter(N=5, Wn=48, btype="high", fs=self.sr)
|
51 |
-
self.per = per
|
52 |
-
self.overlap = 0.3
|
53 |
-
self.tail = self.per + self.overlap
|
54 |
-
self.max = 0.9
|
55 |
-
self.alpha = 0.75
|
56 |
-
self.exp_dir = exp_dir
|
57 |
-
self.gt_wavs_dir = "%s/0_gt_wavs" % exp_dir
|
58 |
-
self.wavs16k_dir = "%s/1_16k_wavs" % exp_dir
|
59 |
-
os.makedirs(self.exp_dir, exist_ok=True)
|
60 |
-
os.makedirs(self.gt_wavs_dir, exist_ok=True)
|
61 |
-
os.makedirs(self.wavs16k_dir, exist_ok=True)
|
62 |
-
|
63 |
-
def norm_write(self, tmp_audio, idx0, idx1):
|
64 |
-
tmp_max = np.abs(tmp_audio).max()
|
65 |
-
if tmp_max > 2.5:
|
66 |
-
print("%s-%s-%s-filtered" % (idx0, idx1, tmp_max))
|
67 |
-
return
|
68 |
-
tmp_audio = (tmp_audio / tmp_max * (self.max * self.alpha)) + (
|
69 |
-
1 - self.alpha
|
70 |
-
) * tmp_audio
|
71 |
-
wavfile.write(
|
72 |
-
"%s/%s_%s.wav" % (self.gt_wavs_dir, idx0, idx1),
|
73 |
-
self.sr,
|
74 |
-
tmp_audio.astype(np.float32),
|
75 |
-
)
|
76 |
-
tmp_audio = librosa.resample(
|
77 |
-
tmp_audio, orig_sr=self.sr, target_sr=16000
|
78 |
-
) # , res_type="soxr_vhq"
|
79 |
-
wavfile.write(
|
80 |
-
"%s/%s_%s.wav" % (self.wavs16k_dir, idx0, idx1),
|
81 |
-
16000,
|
82 |
-
tmp_audio.astype(np.float32),
|
83 |
-
)
|
84 |
-
|
85 |
-
def pipeline(self, path, idx0):
|
86 |
-
try:
|
87 |
-
audio = load_audio(path, self.sr)
|
88 |
-
# zero phased digital filter cause pre-ringing noise...
|
89 |
-
# audio = signal.filtfilt(self.bh, self.ah, audio)
|
90 |
-
audio = signal.lfilter(self.bh, self.ah, audio)
|
91 |
-
|
92 |
-
idx1 = 0
|
93 |
-
for audio in self.slicer.slice(audio):
|
94 |
-
i = 0
|
95 |
-
while 1:
|
96 |
-
start = int(self.sr * (self.per - self.overlap) * i)
|
97 |
-
i += 1
|
98 |
-
if len(audio[start:]) > self.tail * self.sr:
|
99 |
-
tmp_audio = audio[start : start + int(self.per * self.sr)]
|
100 |
-
self.norm_write(tmp_audio, idx0, idx1)
|
101 |
-
idx1 += 1
|
102 |
-
else:
|
103 |
-
tmp_audio = audio[start:]
|
104 |
-
idx1 += 1
|
105 |
-
break
|
106 |
-
self.norm_write(tmp_audio, idx0, idx1)
|
107 |
-
println("%s->Suc." % path)
|
108 |
-
except:
|
109 |
-
println("%s->%s" % (path, traceback.format_exc()))
|
110 |
-
|
111 |
-
def pipeline_mp(self, infos):
|
112 |
-
for path, idx0 in infos:
|
113 |
-
self.pipeline(path, idx0)
|
114 |
-
|
115 |
-
def pipeline_mp_inp_dir(self, inp_root, n_p):
|
116 |
-
try:
|
117 |
-
infos = [
|
118 |
-
("%s/%s" % (inp_root, name), idx)
|
119 |
-
for idx, name in enumerate(sorted(list(os.listdir(inp_root))))
|
120 |
-
]
|
121 |
-
if noparallel:
|
122 |
-
for i in range(n_p):
|
123 |
-
self.pipeline_mp(infos[i::n_p])
|
124 |
-
else:
|
125 |
-
ps = []
|
126 |
-
for i in range(n_p):
|
127 |
-
p = multiprocessing.Process(
|
128 |
-
target=self.pipeline_mp, args=(infos[i::n_p],)
|
129 |
-
)
|
130 |
-
ps.append(p)
|
131 |
-
p.start()
|
132 |
-
for i in range(n_p):
|
133 |
-
ps[i].join()
|
134 |
-
except:
|
135 |
-
println("Fail. %s" % traceback.format_exc())
|
136 |
-
|
137 |
-
|
138 |
-
def preprocess_trainset(inp_root, sr, n_p, exp_dir, per):
|
139 |
-
pp = PreProcess(sr, exp_dir, per)
|
140 |
-
println("start preprocess")
|
141 |
-
println(sys.argv)
|
142 |
-
pp.pipeline_mp_inp_dir(inp_root, n_p)
|
143 |
-
println("end preprocess")
|
144 |
-
|
145 |
-
|
146 |
-
if __name__ == "__main__":
|
147 |
-
preprocess_trainset(inp_root, sr, n_p, exp_dir, per)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|