Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
@@ -4,7 +4,7 @@ import random
|
|
4 |
import string
|
5 |
import tempfile
|
6 |
import time
|
7 |
-
from concurrent.futures import ThreadPoolExecutor
|
8 |
from typing import Iterable, List
|
9 |
|
10 |
import gradio as gr
|
@@ -14,7 +14,6 @@ import yaml
|
|
14 |
from gradio_logsview.logsview import Log, LogsView, LogsViewRunner
|
15 |
from mergekit.config import MergeConfiguration
|
16 |
from clean_community_org import garbage_collect_empty_models
|
17 |
-
import spaces
|
18 |
|
19 |
has_gpu = torch.cuda.is_available()
|
20 |
|
@@ -62,18 +61,19 @@ If you use it in your research, please cite the following paper:
|
|
62 |
year={2024}
|
63 |
}
|
64 |
This Space is heavily inspired by LazyMergeKit by Maxime Labonne (see [Colab](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb)).
|
|
|
65 |
"""
|
66 |
|
67 |
examples = [[str(f)] for f in pathlib.Path("examples").glob("*.yaml")]
|
68 |
COMMUNITY_HF_TOKEN = os.getenv("COMMUNITY_HF_TOKEN")
|
69 |
|
70 |
-
|
71 |
def merge(yaml_config, hf_token, repo_name, profile_name):
|
72 |
runner = LogsViewRunner()
|
73 |
|
74 |
if not yaml_config:
|
75 |
yield runner.log("Empty yaml, pick an example below", level="ERROR")
|
76 |
return
|
|
|
77 |
try:
|
78 |
merge_config = MergeConfiguration.model_validate(yaml.safe_load(yaml_config))
|
79 |
except Exception as e:
|
@@ -83,10 +83,7 @@ def merge(yaml_config, hf_token, repo_name, profile_name):
|
|
83 |
is_community_model = False
|
84 |
if not hf_token:
|
85 |
if "/" in repo_name and not repo_name.startswith("mergekit-community/"):
|
86 |
-
yield runner.log(
|
87 |
-
f"Cannot upload merge model to namespace {repo_name.split('/')[0]}: you must provide a valid token.",
|
88 |
-
level="ERROR",
|
89 |
-
)
|
90 |
return
|
91 |
yield runner.log("No HF token provided. Your merged model will be uploaded to the https://huggingface.co/mergekit-community organization.")
|
92 |
is_community_model = True
|
@@ -132,11 +129,7 @@ def merge(yaml_config, hf_token, repo_name, profile_name):
|
|
132 |
return
|
133 |
|
134 |
yield runner.log("Model merged successfully. Uploading to HF.")
|
135 |
-
yield from runner.run_python(
|
136 |
-
api.upload_folder,
|
137 |
-
repo_id=repo_url.repo_id,
|
138 |
-
folder_path=merged_path / "merge",
|
139 |
-
)
|
140 |
yield runner.log(f"Model successfully uploaded to HF: {repo_url.repo_id}")
|
141 |
|
142 |
|
@@ -158,22 +151,25 @@ with gr.Blocks() as demo:
|
|
158 |
button.click(fn=merge, inputs=[config, token, repo_name, profile_name], outputs=[logs])
|
159 |
|
160 |
|
161 |
-
def
|
162 |
-
|
163 |
-
|
164 |
-
|
165 |
-
|
166 |
-
print("Error running garbage collection", e)
|
167 |
-
time.sleep(3600)
|
168 |
|
169 |
|
170 |
-
|
171 |
-
|
|
|
|
|
172 |
|
173 |
|
174 |
@spaces.GPU
|
175 |
def launch():
|
176 |
-
|
|
|
|
|
|
|
177 |
|
178 |
|
179 |
if __name__ == "__main__":
|
|
|
4 |
import string
|
5 |
import tempfile
|
6 |
import time
|
7 |
+
from concurrent.futures import ThreadPoolExecutor, as_completed
|
8 |
from typing import Iterable, List
|
9 |
|
10 |
import gradio as gr
|
|
|
14 |
from gradio_logsview.logsview import Log, LogsView, LogsViewRunner
|
15 |
from mergekit.config import MergeConfiguration
|
16 |
from clean_community_org import garbage_collect_empty_models
|
|
|
17 |
|
18 |
has_gpu = torch.cuda.is_available()
|
19 |
|
|
|
61 |
year={2024}
|
62 |
}
|
63 |
This Space is heavily inspired by LazyMergeKit by Maxime Labonne (see [Colab](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb)).
|
64 |
+
|
65 |
"""
|
66 |
|
67 |
examples = [[str(f)] for f in pathlib.Path("examples").glob("*.yaml")]
|
68 |
COMMUNITY_HF_TOKEN = os.getenv("COMMUNITY_HF_TOKEN")
|
69 |
|
|
|
70 |
def merge(yaml_config, hf_token, repo_name, profile_name):
|
71 |
runner = LogsViewRunner()
|
72 |
|
73 |
if not yaml_config:
|
74 |
yield runner.log("Empty yaml, pick an example below", level="ERROR")
|
75 |
return
|
76 |
+
|
77 |
try:
|
78 |
merge_config = MergeConfiguration.model_validate(yaml.safe_load(yaml_config))
|
79 |
except Exception as e:
|
|
|
83 |
is_community_model = False
|
84 |
if not hf_token:
|
85 |
if "/" in repo_name and not repo_name.startswith("mergekit-community/"):
|
86 |
+
yield runner.log(f"Cannot upload merge model to namespace {repo_name.split('/')[0]}: you must provide a valid token.", level="ERROR")
|
|
|
|
|
|
|
87 |
return
|
88 |
yield runner.log("No HF token provided. Your merged model will be uploaded to the https://huggingface.co/mergekit-community organization.")
|
89 |
is_community_model = True
|
|
|
129 |
return
|
130 |
|
131 |
yield runner.log("Model merged successfully. Uploading to HF.")
|
132 |
+
yield from runner.run_python(api.upload_folder, repo_id=repo_url.repo_id, folder_path=merged_path / "merge")
|
|
|
|
|
|
|
|
|
133 |
yield runner.log(f"Model successfully uploaded to HF: {repo_url.repo_id}")
|
134 |
|
135 |
|
|
|
151 |
button.click(fn=merge, inputs=[config, token, repo_name, profile_name], outputs=[logs])
|
152 |
|
153 |
|
154 |
+
def garbage_collect():
    """Run one cleanup pass over empty models in the community org.

    Any failure is printed and swallowed on purpose: this runs inside a
    long-lived background scheduler, and a single bad pass must never
    kill the loop.
    """
    try:
        garbage_collect_empty_models(token=COMMUNITY_HF_TOKEN)
    except Exception as e:
        print(f"Error running garbage collection: {e}")
|
|
|
|
|
159 |
|
160 |
|
161 |
+
def schedule_garbage_collection(interval_seconds=3600):
    """Trigger garbage_collect() forever, once per interval.

    Sleeps first, so the initial pass happens one full
    ``interval_seconds`` (default: one hour) after startup. This
    function never returns; run it on a background thread.
    """
    while True:
        time.sleep(interval_seconds)
        garbage_collect()
|
165 |
|
166 |
|
167 |
def launch():
    """Start the periodic garbage-collection worker, then serve the UI.

    Fix: the previous revision kept an ``@spaces.GPU`` decorator here
    even though this commit removed ``import spaces`` (old line 17 of
    the diff), so the module failed at import time with a NameError —
    the Space's "Runtime error". The decorator is dropped: launching
    the Gradio app needs no GPU reservation, and ``spaces`` is no
    longer in scope.
    """
    # One worker is enough: schedule_garbage_collection() blocks
    # forever, so a single background thread covers all cleanup passes.
    pool = ThreadPoolExecutor(max_workers=1)
    pool.submit(schedule_garbage_collection)

    # Blocks serving the Blocks app; share=True exposes a public link.
    demo.launch(share=True)
|
173 |
|
174 |
|
175 |
if __name__ == "__main__":
|