Spaces:
Sleeping
Sleeping
add tonic tokenizer
Browse files — tasks/text.py (+10 −5)
tasks/text.py
CHANGED
Hunk 1 — before the change (deletions marked with `-`; one deleted line is truncated in the page source):

@@ -60,17 +60,22 @@ async def evaluate_text(request: TextEvaluationRequest):
 60
 61       # Model and tokenizer paths
 62       path_model = 'Tonic/climate-guard-toxic-agent'
 63 -     path_tokenizer = "          <- line truncated in page source
 64
 65 -     # Load tokenizer
 66       tokenizer = AutoTokenizer.from_pretrained(path_tokenizer)
 67       model = AutoModelForSequenceClassification.from_pretrained(
 68           path_model,
 69           trust_remote_code=True,
 70 -
 71       ).to(device)
 72
 73 -     #
 74       model.eval()
 75
 76       # Preprocess function
|
Hunk 2 — before the change (the `padding=True` argument is removed):

@@ -78,7 +83,6 @@ async def evaluate_text(request: TextEvaluationRequest):
 78       return tokenizer(
 79           examples["quote"],
 80           truncation=True,
 81 -         padding=True,
 82           return_tensors=None
 83       )
 84
|
Hunk 3 — before the change:

@@ -124,6 +128,7 @@ async def evaluate_text(request: TextEvaluationRequest):
 124      print(f"Error during model inference: {str(e)}")
 125      raise
 126
 127      #--------------------------------------------------------------------------------------------
 128      # MODEL INFERENCE ENDS HERE
 129      #--------------------------------------------------------------------------------------------
Hunk 1 — after the change (additions marked with `+`):

 60
 61       # Model and tokenizer paths
 62       path_model = 'Tonic/climate-guard-toxic-agent'
 63 +     path_tokenizer = "Tonic/climate-guard-toxic-agent"
 64
 65 +     # Load tokenizer
 66       tokenizer = AutoTokenizer.from_pretrained(path_tokenizer)
 67 +
 68 +     # Load model
 69       model = AutoModelForSequenceClassification.from_pretrained(
 70           path_model,
 71           trust_remote_code=True,
 72 +         num_labels=8,
 73 +         problem_type="single_label_classification",
 74 +         ignore_mismatched_sizes=True
 75       ).to(device)
 76
 77 +     # Convert to half precision and eval mode
 78 +     model = model.half()
 79       model.eval()
 80
 81       # Preprocess function
Hunk 2 — after the change (the `padding=True` line no longer appears):

 83       return tokenizer(
 84           examples["quote"],
 85           truncation=True,
 86           return_tensors=None
 87       )
 88
Hunk 3 — after the change (one blank line added):

 128      print(f"Error during model inference: {str(e)}")
 129      raise
 130
 131 +
 132      #--------------------------------------------------------------------------------------------
 133      # MODEL INFERENCE ENDS HERE
 134      #--------------------------------------------------------------------------------------------