Update app.py
app.py CHANGED
@@ -3,7 +3,6 @@ import torch
 import random
 import transformers
 from transformers import T5Tokenizer, T5ForConditionalGeneration
-import spaces
 
 if torch.cuda.is_available():
     device = "cuda"
@@ -16,7 +15,6 @@ tokenizer = T5Tokenizer.from_pretrained("roborovski/superprompt-v1")
 model = T5ForConditionalGeneration.from_pretrained("roborovski/superprompt-v1", torch_dtype=torch.float16)
 model.to(device)
 
-@spaces.GPU()
 def generate(your_prompt, max_new_tokens, repetition_penalty, temperature, model_precision_type, top_p, top_k, seed):
     if seed == 0:
         seed = random.randint(1, 2**32-1)
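The commit removes the ZeroGPU-specific `import spaces` and the `@spaces.GPU()` decorator, so after this change `generate` runs as a plain function on whichever device was selected above. Below is a minimal sketch of how the resulting module might fit together; the body of `generate` beyond the seed handling is not shown in the diff, so the generation call and its argument wiring are illustrative assumptions, not the Space's actual code.

import random

import torch
import transformers
from transformers import T5Tokenizer, T5ForConditionalGeneration

device = "cuda" if torch.cuda.is_available() else "cpu"

# Mirrors the diff: the checkpoint is loaded in float16 regardless of device.
tokenizer = T5Tokenizer.from_pretrained("roborovski/superprompt-v1")
model = T5ForConditionalGeneration.from_pretrained(
    "roborovski/superprompt-v1", torch_dtype=torch.float16
)
model.to(device)


def generate(your_prompt, max_new_tokens, repetition_penalty, temperature,
             model_precision_type, top_p, top_k, seed):
    # Seed handling as in the diff: 0 means "pick a random seed".
    if seed == 0:
        seed = random.randint(1, 2**32 - 1)
    transformers.set_seed(seed)

    # Hypothetical generation call; the real body is not part of this hunk.
    # model_precision_type is accepted but its use is not visible in the diff.
    input_ids = tokenizer(your_prompt, return_tensors="pt").input_ids.to(device)
    outputs = model.generate(
        input_ids,
        max_new_tokens=max_new_tokens,
        repetition_penalty=repetition_penalty,
        temperature=temperature,
        do_sample=True,
        top_p=top_p,
        top_k=top_k,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)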