Update app.py
app.py CHANGED
@@ -232,8 +232,6 @@ def get_last_mp3_file(output_dir):
     mp3_files_with_path.sort(key=os.path.getmtime, reverse=True)
     return mp3_files_with_path[0]
 
-
-
 def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
     try:
         # Model selection and setup
@@ -255,7 +253,7 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
         os.makedirs(output_dir, exist_ok=True)
         empty_output_folder(output_dir)
 
-        # Build the base command
+        # Build the base command (unnecessary parameters removed)
         command = [
             "python", "infer.py",
             "--stage1_model", model_path,
@@ -266,16 +264,12 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
             "--stage2_batch_size", str(config['batch_size']),
             "--output_dir", output_dir,
             "--cuda_idx", "0",
-            "--max_new_tokens", str(actual_max_tokens)
-            "--temperature", str(config['temperature']),
-            "--chorus_strength", str(config['chorus_strength'])
+            "--max_new_tokens", str(actual_max_tokens)
         ]
 
         # Apply extra options only when a GPU is available
         if torch.cuda.is_available():
-            command.extend([
-                "--disable_offload_model"
-            ])
+            command.append("--disable_offload_model")
 
         # Set CUDA environment variables
         env = os.environ.copy()
@@ -288,6 +282,13 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
             "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512"
         })
 
+        # Handle transformers cache migration
+        try:
+            from transformers.utils import move_cache
+            move_cache()
+        except Exception as e:
+            logging.warning(f"Cache migration warning (non-critical): {e}")
+
         # Run the command
         process = subprocess.run(
             command,
@@ -333,16 +334,7 @@ def main():
     with gr.Blocks() as demo:
         with gr.Column():
             gr.Markdown("# YuE: Open Music Foundation Models for Full-Song Generation (Multi-Language Support)")
-
-            <div style="display:flex;column-gap:4px;">
-                <a href="https://github.com/multimodal-art-projection/YuE">
-                    <img src='https://img.shields.io/badge/GitHub-Repo-blue'>
-                </a>
-                <a href="https://map-yue.github.io">
-                    <img src='https://img.shields.io/badge/Project-Page-green'>
-                </a>
-            </div>
-            """)
+
 
         with gr.Row():
             with gr.Column():
@@ -394,17 +386,6 @@ With you here beside me, everything's alright
 Can't imagine life alone, don't want to let you go
 Stay with me forever, let our love just flow
 
-[verse]
-Morning light is breaking, through the window pane
-Memories of yesterday, like soft summer rain
-In your arms I'm finding, all I'm dreaming of
-Every day beside you, fills my heart with love
-
-[chorus]
-Don't let this moment fade, hold me close tonight
-With you here beside me, everything's alright
-Can't imagine life alone, don't want to let you go
-Stay with me forever, let our love just flow
 """
             ],
             # Korean example
@@ -422,17 +403,6 @@ Stay with me forever, let our love just flow
 영원히 계속될 우리의 노래
 이 순간을 기억해 forever
 
-[verse]
-새로운 내일을 향해 나아가
-우리만의 길을 만들어가
-믿음으로 가득한 우리의 맘
-절대 멈추지 않아 계속해서
-
-[chorus]
-달려가자 더 높이 더 멀리
-두려움은 없어 너와 함께라면
-영원히 계속될 우리의 노래
-이 순간을 기억해 forever
 """
             ]
         ],
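For reference, the pattern the updated `infer()` follows is: build the CLI invocation as an argument list (no shell quoting), append GPU-only flags conditionally, and hand `subprocess.run` a copied environment so `os.environ` itself is never mutated. A minimal sketch of that pattern; the flags mirror the diff, while `model_path`, `config`, `actual_max_tokens`, and `output_dir` are hypothetical stand-ins for values the app computes elsewhere:

```python
import os
import subprocess
import torch

# Hypothetical stand-ins for values the app derives elsewhere.
model_path = "models/stage1"
config = {"batch_size": 4}
actual_max_tokens = 3000
output_dir = "./output"

command = [
    "python", "infer.py",
    "--stage1_model", model_path,
    "--stage2_batch_size", str(config["batch_size"]),
    "--output_dir", output_dir,
    "--cuda_idx", "0",
    "--max_new_tokens", str(actual_max_tokens),
]

# Offloading is only disabled when a GPU is actually present.
if torch.cuda.is_available():
    command.append("--disable_offload_model")

# Copy, don't mutate: the allocator tuning applies only to the child process.
env = os.environ.copy()
env.update({"PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512"})

process = subprocess.run(command, env=env, capture_output=True, text=True)
print(process.returncode)
```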
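The new try/except around `transformers.utils.move_cache` uses the helper transformers exposes to migrate its older cache layout (changed around v4.22) to the shared Hugging Face cache. It is best-effort here: a failure, such as an older transformers version or an already-migrated cache, only logs a warning instead of aborting generation. The same guard as a standalone sketch:

```python
import logging

try:
    # move_cache() migrates the old transformers cache layout in place.
    from transformers.utils import move_cache
    move_cache()
except Exception as e:
    # Non-critical: an already-migrated or older installation lands here.
    logging.warning(f"Cache migration warning (non-critical): {e}")
```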
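The unchanged context at the top of the diff shows `get_last_mp3_file()` picking the newest generated track by modification time. A self-contained sketch of such a helper, with the listing and filtering logic (which the diff does not show) assumed:

```python
import os
from typing import Optional

def get_last_mp3_file(output_dir: str) -> Optional[str]:
    """Return the most recently modified .mp3 in output_dir, or None."""
    mp3_files_with_path = [
        os.path.join(output_dir, name)
        for name in os.listdir(output_dir)
        if name.lower().endswith(".mp3")
    ]
    if not mp3_files_with_path:
        return None
    # Newest first, matching the diff's context lines.
    mp3_files_with_path.sort(key=os.path.getmtime, reverse=True)
    return mp3_files_with_path[0]
```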