modified: app.py
@@ -4,6 +4,8 @@ import os
 import shutil
 import tempfile
 import spaces
+from transformers import AutoTokenizer, AutoModelForCausalLM, LogitsProcessor, LogitsProcessorList
+import torch

 is_shared_ui = True if "innova-ai/YuE-music-generator-demo" in os.environ['SPACE_ID'] else False

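A side note on the is_shared_ui check that this hunk carries as context: os.environ['SPACE_ID'] raises a KeyError wherever SPACE_ID is unset (i.e. outside a Hugging Face Space), and the "True if ... else False" wrapper is redundant around an expression that is already a boolean. A minimal defensive variant, sketched here as a suggestion rather than as part of the commit:

import os

# os.environ.get("SPACE_ID", "") returns an empty string instead of raising
# KeyError when SPACE_ID is unset, so the check also works outside a Space;
# the `in` test already yields a bool, so no True/False wrapper is needed.
is_shared_ui = "innova-ai/YuE-music-generator-demo" in os.environ.get("SPACE_ID", "")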
@@ -108,7 +110,7 @@ device = torch.device(f"cuda:{cuda_idx}" if torch.cuda.is_available() else "cpu"

 model = AutoModelForCausalLM.from_pretrained(
     "m-a-p/YuE-s1-7B-anneal-en-cot",
-    torch_dtype=torch.
+    torch_dtype=torch.float16,
     attn_implementation="flash_attention_2", # To enable flashattn, you have to install flash-attn
 )
 model.to(device)
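Taken together, the two hunks import torch (first hunk) and complete the previously truncated torch_dtype=torch. argument as torch.float16 (second hunk). For context, a minimal standalone sketch of the loading code the diff arrives at; the flash-attn availability probe and the "sdpa" fallback are my additions, since the commit itself hard-codes flash_attention_2, which fails to load unless the flash-attn package is installed:

import importlib.util

import torch
from transformers import AutoModelForCausalLM

# Fall back to PyTorch's built-in scaled-dot-product attention when the
# flash-attn package is absent (this fallback is an assumption, not in the diff).
attn_impl = "flash_attention_2" if importlib.util.find_spec("flash_attn") else "sdpa"

model = AutoModelForCausalLM.from_pretrained(
    "m-a-p/YuE-s1-7B-anneal-en-cot",
    torch_dtype=torch.float16,  # half precision, as set in the diff
    attn_implementation=attn_impl,
)
# Mirrors the `device = torch.device(...)` context line, with cuda_idx fixed to 0.
model.to(torch.device("cuda:0" if torch.cuda.is_available() else "cpu"))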