Spaces:
Sleeping
Sleeping
token
Browse files
app.py
CHANGED
@@ -2,7 +2,7 @@ import torch
 from transformers import AutoTokenizer, AutoModelForCausalLM, BitsAndBytesConfig
 import gradio as gr
 from peft import PeftModel
-import
+import spaces  # Ensure spaces is imported
 
 # Define the base model ID
 base_model_id = "meta-llama/Llama-2-13b-hf"
@@ -38,9 +38,10 @@ def formatting_func(job_description):
     text = f"### The job description: {job_description}\n ### The skills: "
     return text
 
+@spaces.GPU  # Decorate the function to ensure it uses GPU
 def generate_skills(job_description):
     formatted_text = formatting_func(job_description)
-    model_input = tokenizer(formatted_text, return_tensors="pt").to("cuda")
+    model_input = tokenizer(formatted_text, return_tensors="pt").to("cuda")  # Use CUDA for GPU support
 
     ft_model.eval()
     with torch.no_grad():