Update app.py
app.py
CHANGED
@@ -210,7 +210,7 @@ EXAMPLES = [
 
 
 # Define the simple_chat function
-@spaces.GPU()
+# @spaces.GPU()
 async def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096, top_p: float = 1, top_k: int = 10, penalty: float = 1.0):
     # Load the pretrained model
     model = AutoModelForCausalLM.from_pretrained(
@@ -220,6 +220,8 @@ async def simple_chat(message: dict, temperature: float = 0.8, max_length: int =
         trust_remote_code=True
     )
 
+    model.to('cuda')
+
     conversation = []
 
     if "file" in message and message["file"]:
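For context, a minimal sketch of how the start of simple_chat reads once both hunks are applied. Only the lines visible in the hunks are taken from app.py; the checkpoint id and the dtype argument are hypothetical placeholders, and with the `@spaces.GPU()` decorator commented out the function now moves the loaded model onto the GPU itself via `model.to('cuda')`.

import torch
from transformers import AutoModelForCausalLM

MODEL_ID = "org/model-name"  # placeholder: the real checkpoint id is not visible in this diff


# @spaces.GPU()  # decorator commented out in this commit
async def simple_chat(message: dict, temperature: float = 0.8, max_length: int = 4096,
                      top_p: float = 1, top_k: int = 10, penalty: float = 1.0):
    # Load the pretrained model
    model = AutoModelForCausalLM.from_pretrained(
        MODEL_ID,
        torch_dtype=torch.bfloat16,  # assumed; the dtype argument sits outside the visible hunk
        trust_remote_code=True
    )
    model.to('cuda')  # added in this commit: place the model on the GPU explicitly

    conversation = []
    # ... the rest of the handler (file handling, generation) continues as in app.py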