Update app.py
app.py CHANGED
@@ -7,6 +7,7 @@ from transformers import pipeline, AutoModelForSpeechSeq2Seq, AutoProcessor
 from transformers.utils import is_flash_attn_2_available
 from languages import get_language_names
 from subtitle_manager import Subtitle
+import spaces
 
 logging.basicConfig(level=logging.INFO)
 last_model = None
@@ -16,7 +17,6 @@ def write_file(output_file, subtitle):
     with open(output_file, 'w', encoding='utf-8') as f:
         f.write(subtitle)
 
-@spaces.GPU
 def create_pipe(model, flash):
     if torch.cuda.is_available():
         device = "cuda:0"
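
For context, `import spaces` pulls in the ZeroGPU helper package used on Hugging Face Spaces: functions decorated with `@spaces.GPU` are granted a GPU only while they run. The snippet below is a minimal sketch of that pattern under stated assumptions, not this Space's actual code; the `transcribe` function, the model name, and the pipeline arguments are illustrative only.

import spaces
import torch
from transformers import pipeline

# Minimal ZeroGPU sketch (assumed model, for illustration): the pipeline is
# built at import time, and @spaces.GPU requests a GPU only for the duration
# of each decorated call.
pipe = pipeline(
    "automatic-speech-recognition",
    model="openai/whisper-large-v3",
    torch_dtype=torch.float16,
    device="cuda:0",
)

@spaces.GPU  # a time budget can also be given, e.g. @spaces.GPU(duration=120)
def transcribe(audio_path: str) -> str:
    # Runs on the GPU allocated by ZeroGPU while this call is active.
    return pipe(audio_path, return_timestamps=True)["text"]

In this commit the decorator is dropped from create_pipe and the import is added at module level, which matches the usual ZeroGPU guidance of decorating only the GPU-bound entry points rather than setup helpers.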