ginipick committed on
Commit
460b152
·
verified ·
1 Parent(s): 2792e64

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +11 -41
app.py CHANGED
@@ -232,8 +232,6 @@ def get_last_mp3_file(output_dir):
232
  mp3_files_with_path.sort(key=os.path.getmtime, reverse=True)
233
  return mp3_files_with_path[0]
234
 
235
-
236
-
237
  def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
238
  try:
239
  # λͺ¨λΈ 선택 및 μ„€μ •
@@ -255,7 +253,7 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
255
  os.makedirs(output_dir, exist_ok=True)
256
  empty_output_folder(output_dir)
257
 
258
- # κΈ°λ³Έ λͺ…λ Ήμ–΄ ꡬ성
259
  command = [
260
  "python", "infer.py",
261
  "--stage1_model", model_path,
@@ -266,16 +264,12 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
266
  "--stage2_batch_size", str(config['batch_size']),
267
  "--output_dir", output_dir,
268
  "--cuda_idx", "0",
269
- "--max_new_tokens", str(actual_max_tokens),
270
- "--temperature", str(config['temperature']),
271
- "--chorus_strength", str(config['chorus_strength'])
272
  ]
273
 
274
  # GPUκ°€ μžˆλŠ” κ²½μš°μ—λ§Œ μΆ”κ°€ μ˜΅μ…˜ 적용
275
  if torch.cuda.is_available():
276
- command.extend([
277
- "--disable_offload_model"
278
- ])
279
 
280
  # CUDA ν™˜κ²½ λ³€μˆ˜ μ„€μ •
281
  env = os.environ.copy()
@@ -288,6 +282,13 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
288
  "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512"
289
  })
290
 
 
 
 
 
 
 
 
291
  # λͺ…λ Ή μ‹€ν–‰
292
  process = subprocess.run(
293
  command,
@@ -333,16 +334,7 @@ def main():
333
  with gr.Blocks() as demo:
334
  with gr.Column():
335
  gr.Markdown("# YuE: Open Music Foundation Models for Full-Song Generation (Multi-Language Support)")
336
- gr.HTML("""
337
- <div style="display:flex;column-gap:4px;">
338
- <a href="https://github.com/multimodal-art-projection/YuE">
339
- <img src='https://img.shields.io/badge/GitHub-Repo-blue'>
340
- </a>
341
- <a href="https://map-yue.github.io">
342
- <img src='https://img.shields.io/badge/Project-Page-green'>
343
- </a>
344
- </div>
345
- """)
346
 
347
  with gr.Row():
348
  with gr.Column():
@@ -394,17 +386,6 @@ With you here beside me, everything's alright
394
  Can't imagine life alone, don't want to let you go
395
  Stay with me forever, let our love just flow
396
 
397
- [verse]
398
- Morning light is breaking, through the window pane
399
- Memories of yesterday, like soft summer rain
400
- In your arms I'm finding, all I'm dreaming of
401
- Every day beside you, fills my heart with love
402
-
403
- [chorus]
404
- Don't let this moment fade, hold me close tonight
405
- With you here beside me, everything's alright
406
- Can't imagine life alone, don't want to let you go
407
- Stay with me forever, let our love just flow
408
  """
409
  ],
410
  # ν•œκ΅­μ–΄ 예제
@@ -422,17 +403,6 @@ Stay with me forever, let our love just flow
422
  μ˜μ›νžˆ 계속될 우리의 λ…Έλž˜
423
  이 μˆœκ°„μ„ κΈ°μ–΅ν•΄ forever
424
 
425
- [verse]
426
- μƒˆλ‘œμš΄ 내일을 ν–₯ν•΄ λ‚˜μ•„κ°€
427
- 우리만의 길을 λ§Œλ“€μ–΄κ°€
428
- 믿음으둜 κ°€λ“ν•œ 우리의 맘
429
- μ ˆλŒ€ λ©ˆμΆ”μ§€ μ•Šμ•„ κ³„μ†ν•΄μ„œ
430
-
431
- [chorus]
432
- λ‹¬λ €κ°€μž 더 높이 더 멀리
433
- 두렀움은 μ—†μ–΄ λ„ˆμ™€ ν•¨κ»˜λΌλ©΄
434
- μ˜μ›νžˆ 계속될 우리의 λ…Έλž˜
435
- 이 μˆœκ°„μ„ κΈ°μ–΅ν•΄ forever
436
  """
437
  ]
438
  ],
 
232
  mp3_files_with_path.sort(key=os.path.getmtime, reverse=True)
233
  return mp3_files_with_path[0]
234
 
 
 
235
  def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
236
  try:
237
  # λͺ¨λΈ 선택 및 μ„€μ •
 
253
  os.makedirs(output_dir, exist_ok=True)
254
  empty_output_folder(output_dir)
255
 
256
+ # κΈ°λ³Έ λͺ…λ Ήμ–΄ ꡬ성 (λΆˆν•„μš”ν•œ λ§€κ°œλ³€μˆ˜ 제거)
257
  command = [
258
  "python", "infer.py",
259
  "--stage1_model", model_path,
 
264
  "--stage2_batch_size", str(config['batch_size']),
265
  "--output_dir", output_dir,
266
  "--cuda_idx", "0",
267
+ "--max_new_tokens", str(actual_max_tokens)
 
 
268
  ]
269
 
270
  # GPUκ°€ μžˆλŠ” κ²½μš°μ—λ§Œ μΆ”κ°€ μ˜΅μ…˜ 적용
271
  if torch.cuda.is_available():
272
+ command.append("--disable_offload_model")
 
 
273
 
274
  # CUDA ν™˜κ²½ λ³€μˆ˜ μ„€μ •
275
  env = os.environ.copy()
 
282
  "PYTORCH_CUDA_ALLOC_CONF": "max_split_size_mb:512"
283
  })
284
 
285
+ # transformers μΊμ‹œ λ§ˆμ΄κ·Έλ ˆμ΄μ…˜ 처리
286
+ try:
287
+ from transformers.utils import move_cache
288
+ move_cache()
289
+ except Exception as e:
290
+ logging.warning(f"Cache migration warning (non-critical): {e}")
291
+
292
  # λͺ…λ Ή μ‹€ν–‰
293
  process = subprocess.run(
294
  command,
 
334
  with gr.Blocks() as demo:
335
  with gr.Column():
336
  gr.Markdown("# YuE: Open Music Foundation Models for Full-Song Generation (Multi-Language Support)")
337
+
 
 
 
 
 
 
 
 
 
338
 
339
  with gr.Row():
340
  with gr.Column():
 
386
  Can't imagine life alone, don't want to let you go
387
  Stay with me forever, let our love just flow
388
 
 
 
 
 
 
 
 
 
 
 
 
389
  """
390
  ],
391
  # ν•œκ΅­μ–΄ 예제
 
403
  μ˜μ›νžˆ 계속될 우리의 λ…Έλž˜
404
  이 μˆœκ°„μ„ κΈ°μ–΅ν•΄ forever
405
 
 
 
 
 
 
 
 
 
 
 
 
406
  """
407
  ]
408
  ],