Spaces: Runtime error
Update app.py
app.py CHANGED
@@ -4,7 +4,7 @@ import random
 import string
 import tempfile
 import time
-from concurrent.futures import ThreadPoolExecutor
+from concurrent.futures import ThreadPoolExecutor
 from typing import Iterable, List
 
 import gradio as gr
@@ -13,8 +13,7 @@ import torch
 import yaml
 from gradio_logsview.logsview import Log, LogsView, LogsViewRunner
 from mergekit.config import MergeConfiguration
-from
-import spaces
+from huggingface_hub import spaces
 
 has_gpu = torch.cuda.is_available()
 
@@ -62,7 +61,6 @@ If you use it in your research, please cite the following paper:
   year={2024}
 }
 This Space is heavily inspired by LazyMergeKit by Maxime Labonne (see [Colab](https://colab.research.google.com/drive/1obulZ1ROXHjYLn6PPZJwRR6GzgQogxxb)).
-
 """
 
 examples = [[str(f)] for f in pathlib.Path("examples").glob("*.yaml")]
@@ -133,7 +131,6 @@ def merge(yaml_config, hf_token, repo_name, profile_name):
     yield from runner.run_python(api.upload_folder, repo_id=repo_url.repo_id, folder_path=merged_path / "merge")
     yield runner.log(f"Model successfully uploaded to HF: {repo_url.repo_id}")
 
-
 with gr.Blocks() as demo:
     gr.Markdown(MARKDOWN_DESCRIPTION)
 
@@ -149,27 +146,11 @@
     gr.Examples(examples, fn=lambda s: (s,), run_on_click=True, label="Examples", inputs=[filename], outputs=[config])
     gr.Markdown(MARKDOWN_ARTICLE)
 
-    button.click(fn=merge, inputs=[config, token, repo_name, profile_name], outputs=[logs])
-
-
-def garbage_collect():
-    try:
-        garbage_collect_empty_models(token=COMMUNITY_HF_TOKEN)
-    except Exception as e:
-        print(f"Error running garbage collection: {e}")
-
-
-def schedule_garbage_collection(interval_seconds=0):
-    while True:
-        time.sleep(interval_seconds)
-        garbage_collect()
+    button.click(fn=merge, inputs=[config, token, repo_name, profile_name], outputs=[logs], queue=False)
 
 
 @spaces.GPU
 def launch():
-    pool = ThreadPoolExecutor(max_workers=1)
-    pool.submit(schedule_garbage_collection)
-
     demo.launch(share=True)
 
 
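The status badge above reads "Runtime error", and the rewritten import is a plausible cause: the @spaces.GPU decorator applied to launch() normally comes from the standalone spaces package that is preinstalled on ZeroGPU Spaces, not from huggingface_hub, so "from huggingface_hub import spaces" would be expected to fail at import time. A minimal sketch of the conventional ZeroGPU pattern, under the assumption that the spaces package is available; generate() here is a made-up stand-in, not code from this repo:

import spaces  # standalone ZeroGPU helper package, preinstalled on ZeroGPU Spaces
import torch


@spaces.GPU  # a GPU is attached only for the duration of the decorated call
def generate(prompt: str) -> str:
    # Report which device the call ended up on; real code would run the model here.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"[{device}] {prompt}"


if __name__ == "__main__":
    print(generate("hello"))

The decorator is also usually placed on the GPU-bound work itself rather than on the function that calls demo.launch(), since the GPU is only held while the decorated function is executing.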
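The other visible change wires the merge button with queue=False. merge() is a generator that yields log updates, and Gradio delivers generator (streaming) handlers through the queue, so disabling it for this event may be another contributor to the failure. A small self-contained sketch of a streaming click handler with the queue left enabled; stream_logs() is a toy stand-in, not the Space's merge function:

import time

import gradio as gr


def stream_logs(steps: float):
    # Toy generator standing in for a long-running job that yields log lines.
    lines = []
    for i in range(int(steps)):
        lines.append(f"step {i + 1}/{int(steps)} done")
        time.sleep(0.1)
        yield "\n".join(lines)


with gr.Blocks() as demo:
    steps = gr.Number(value=5, label="Steps")
    logs = gr.Textbox(label="Logs", lines=8)
    run = gr.Button("Run")
    # Leaving queue at its default (enabled) lets each yielded value reach the
    # Textbox incrementally instead of only the final result.
    run.click(fn=stream_logs, inputs=[steps], outputs=[logs])


if __name__ == "__main__":
    demo.launch()

On older Gradio 3.x you would also call demo.queue() before launch(); recent releases enable the queue by default.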