Commit
·
ae08a8d
1
Parent(s):
0df179d
update
Browse files
- app.py +6 -3
- requirements.txt +1 -0
app.py
CHANGED
@@ -4,7 +4,9 @@ import torch
|
|
4 |
|
5 |
# Model loading and setup
|
6 |
model_name = "jhu-clsp/FollowIR-7B"
|
7 |
-
model = AutoModelForCausalLM.from_pretrained(model_name)
|
|
|
|
|
8 |
tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
|
9 |
tokenizer.pad_token = tokenizer.eos_token
|
10 |
tokenizer.padding_side = "left"
|
@@ -29,8 +31,9 @@ def check_relevance(query, instruction, passage):
|
|
29 |
pad_to_multiple_of=None,
|
30 |
)
|
31 |
|
32 |
-
|
33 |
-
|
|
|
34 |
|
35 |
batch_scores = model(**tokens).logits[:, -1, :]
|
36 |
true_vector = batch_scores[:, token_true_id]
|
|
|
4 |
|
5 |
# Model loading and setup
|
6 |
model_name = "jhu-clsp/FollowIR-7B"
|
7 |
+
model = AutoModelForCausalLM.from_pretrained(model_name)
|
8 |
+
if torch.cuda.is_available():
|
9 |
+
model = model.cuda()
|
10 |
tokenizer = AutoTokenizer.from_pretrained(model_name, padding_side="left")
|
11 |
tokenizer.pad_token = tokenizer.eos_token
|
12 |
tokenizer.padding_side = "left"
|
|
|
31 |
pad_to_multiple_of=None,
|
32 |
)
|
33 |
|
34 |
+
if torch.cuda.is_available():
|
35 |
+
for key in tokens:
|
36 |
+
tokens[key] = tokens[key].cuda()
|
37 |
|
38 |
batch_scores = model(**tokens).logits[:, -1, :]
|
39 |
true_vector = batch_scores[:, token_true_id]
|
requirements.txt
CHANGED
@@ -1,3 +1,4 @@
|
|
1 |
gradio
|
|
|
2 |
torch
|
3 |
transformers
|
|
|
1 |
gradio
|
2 |
+
--extra-index-url https://download.pytorch.org/whl/cu117
|
3 |
torch
|
4 |
transformers
|