Update app.py
app.py CHANGED
@@ -18,7 +18,7 @@ infer_prompt = """Li jêr rêwerzek heye ku peywirek rave dike, bi têketinek ku
 """
 
 snapshot_download("nazimali/Mistral-Nemo-Kurdish")
-snapshot_download(repo_id=model_id)
+snapshot_download(repo_id=model_id, ignore_patterns=["*.gguf"])
 
 
 @spaces.GPU
@@ -52,7 +52,7 @@ def respond(
         return_tensors="pt",
         add_special_tokens=False,
         return_token_type_ids=False,
-    )
+    ).to("cuda")
 
     with torch.inference_mode():
         generated_ids = model.generate(
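
The first hunk adds ignore_patterns=["*.gguf"] so snapshot_download skips the repository's GGUF quantizations and only pre-fetches the files that transformers actually loads. A minimal sketch of that call, assuming model_id is defined earlier in app.py (the value below is only a placeholder):

from huggingface_hub import snapshot_download

# model_id is defined elsewhere in app.py; this value is a placeholder.
model_id = "nazimali/Mistral-Nemo-Kurdish"

# Pre-download the repo at startup, but skip any *.gguf files so the large
# quantized weights are not fetched alongside the regular model weights.
snapshot_download(repo_id=model_id, ignore_patterns=["*.gguf"])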
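
The second hunk moves the tokenized inputs onto the GPU with .to("cuda"), so they sit on the same device as the model when generate runs inside the @spaces.GPU-decorated function. A rough sketch of that flow, assuming tokenizer and model are loaded at module level in app.py; the function signature, prompt construction, generation settings, and decode step here are illustrative, not the app's exact code:

import spaces
import torch

# tokenizer and model are assumed to be loaded at module level in app.py.

@spaces.GPU
def respond(message, history):
    # Tokenize the prompt; the BatchEncoding returned by the tokenizer
    # supports .to(device), which moves input_ids / attention_mask to CUDA.
    model_inputs = tokenizer(
        message,  # the app builds the prompt from infer_prompt; not shown in this diff
        return_tensors="pt",
        add_special_tokens=False,
        return_token_type_ids=False,
    ).to("cuda")

    with torch.inference_mode():
        generated_ids = model.generate(
            **model_inputs,
            max_new_tokens=512,  # assumed generation setting
        )

    # Decode the generated tokens (assumed post-processing).
    return tokenizer.decode(generated_ids[0], skip_special_tokens=True)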