Commit
·
6797f10
1
Parent(s):
3725bfb
chore: Update flash-attn dependency in app.py and requirements.txt
Browse files
- app.py +2 -2
- requirements.txt +0 -1
app.py
CHANGED
|
@@ -6,8 +6,8 @@ from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStream
|
|
| 6 |
import torch
|
| 7 |
import gradio as gr
|
| 8 |
from threading import Thread
|
| 9 |
-
|
| 10 |
-
|
| 11 |
|
| 12 |
models_available = [
|
| 13 |
"MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",
|
|
|
|
| 6 |
import torch
|
| 7 |
import gradio as gr
|
| 8 |
from threading import Thread
|
| 9 |
+
import subprocess
|
| 10 |
+
subprocess.run('pip install -U flash-attn', shell=True)
|
| 11 |
|
| 12 |
models_available = [
|
| 13 |
"MohamedRashad/Arabic-Orpo-Llama-3-8B-Instruct",
|
requirements.txt
CHANGED
|
@@ -2,5 +2,4 @@ transformers==4.44.1
|
|
| 2 |
torch
|
| 3 |
accelerate==0.33.0
|
| 4 |
sentencepiece==0.2.0
|
| 5 |
-
flash-attn==2.6.3
|
| 6 |
spaces
|
|
|
|
| 2 |
torch
|
| 3 |
accelerate==0.33.0
|
| 4 |
sentencepiece==0.2.0
|
|
|
|
| 5 |
spaces
|