Update app.py
app.py CHANGED
@@ -1,5 +1,6 @@
 import gradio as gr
-from transformers import pipeline
+from transformers import pipeline, AutoTokenizer
+from turkish_lm_tuner import T5ForClassification
 import os
 
 # Retrieve Hugging Face authentication token from environment variables
@@ -22,8 +23,17 @@ APP_CITATION = """
 For citation, please refer to the tool's documentation.
 """
 
+def inference_t5(input_text, selected_model):
+    model = T5ForClassification.from_pretrained("gokceuludogan/turna_tr_hateprint_w0.1_new_")  #_b128")
+    tokenizer = AutoTokenizer.from_pretrained("gokceuludogan/turna_tr_hateprint_w0.1_new_")  #_b128")
+    return model(**tokenizer(input_text, return_tensors='pt')).logits
+
+
 # Functions for model-based tasks
 def perform_binary_classification(input_text, selected_model):
+    if 'turna' in selected_model:
+        return inference_t5(input_text, selected_model)
+
     model = pipeline(model=f'gokceuludogan/{selected_model}')
     return model(input_text)[0]
 
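One detail worth flagging in the new code path: inference_t5 returns raw logits, while the pipeline branch of perform_binary_classification returns a dict of the form {'label': ..., 'score': ...}, so the two branches hand different structures back to the Gradio interface. Below is a minimal sketch of how the logits could be normalized into the pipeline-style output; logits_to_label and ID2LABEL are hypothetical names, and the label mapping of the turna_tr_hateprint checkpoint is an assumption not shown in this diff.

import torch

# Hypothetical id-to-label mapping; the real mapping lives in the
# checkpoint's config and is not visible in this diff.
ID2LABEL = {0: 'LABEL_0', 1: 'LABEL_1'}

def logits_to_label(logits):
    # logits: tensor of shape (1, num_labels), as returned by inference_t5
    probs = torch.softmax(logits, dim=-1)
    score, idx = probs.max(dim=-1)
    return {'label': ID2LABEL[idx.item()], 'score': score.item()}

With a helper like this, the turna branch could return logits_to_label(inference_t5(input_text, selected_model)) so that both model families produce the same output shape for display.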