Update app.py
app.py
CHANGED
@@ -306,22 +306,21 @@ def infer(genre_txt_content, lyrics_txt_content, num_segments, max_new_tokens):
     os.makedirs(output_dir, exist_ok=True)
     empty_output_folder(output_dir)
 
+    # Fixed command - removed unsupported arguments
     command = [
-        "python", "
+        "python", "infer.py",
         "--stage1_model", model_path,
         "--stage2_model", "m-a-p/YuE-s2-1B-general",
         "--genre_txt", genre_txt_path,
         "--lyrics_txt", lyrics_txt_path,
         "--run_n_segments", str(actual_num_segments),
         "--stage2_batch_size", "16",
-        "--output_dir",
+        "--output_dir", output_dir,
         "--cuda_idx", "0",
         "--max_new_tokens", str(actual_max_tokens),
-        "--disable_offload_model"
+        "--disable_offload_model"  # Added for GPU memory optimization
     ]
 
-
-
     env = os.environ.copy()
     if torch.cuda.is_available():
         env.update({
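The hunk ends just after the command list and the start of the CUDA environment setup; the subprocess call that actually runs infer.py is outside the shown lines. As a hedged illustration only (not part of this commit), a typical way such a command list is executed with the copied environment is sketched below; the helper name run_yue_inference is hypothetical.

import subprocess

def run_yue_inference(command, env):
    # Hypothetical helper (assumption, not from the commit): run infer.py with
    # the prepared environment and surface stderr if the YuE pipeline fails.
    result = subprocess.run(command, env=env, capture_output=True, text=True)
    if result.returncode != 0:
        raise RuntimeError(f"Inference failed:\n{result.stderr}")
    return result.stdout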