Update app.py
app.py
CHANGED
@@ -7,13 +7,14 @@ tokenizer = AutoTokenizer.from_pretrained("MarkAdamsMSBA24/ADRv2024")
 model = AutoModelForSequenceClassification.from_pretrained("MarkAdamsMSBA24/ADRv2024")
 
 # Define the prediction function
+
 def get_prediction(text):
-
-
-
-
-
-    return
+    X_test = str(text).lower()
+    encoded_input = tokenizer(X_test, return_tensors='pt')
+    output = model(**encoded_input)
+    scores = output[0][0].detach()
+    scores = torch.nn.functional.softmax(scores)
+    return {"Severe Reaction": float(scores.numpy()[1]), "Non-severe Reaction": float(scores.numpy()[0])}
 
 iface = gr.Interface(
     fn=get_prediction,