app.py
CHANGED
@@ -1,46 +1,32 @@
 import gradio as gr
-from transformers import
-import torch
+from transformers import AutoTokenizer, AutoConfig, AutoModelForSequenceClassification

-#
-
-tokenizer = AutoTokenizer.from_pretrained(
-model = AutoModelForSequenceClassification.from_pretrained(model_id)
+# Load model and tokenizer
+model_name = "cheberle/autotrain-35swc-b4r9z"
+tokenizer = AutoTokenizer.from_pretrained(model_name)

-#
-
-model =
+# Explicitly define the model configuration if needed
+config = AutoConfig.from_pretrained(model_name)
+model = AutoModelForSequenceClassification.from_pretrained(model_name, config=config)

-
-
-    inputs = tokenizer(
-
-
-
-
-
-    with torch.no_grad():
-        outputs = model(**inputs)
-    predictions = torch.nn.functional.softmax(outputs.logits, dim=-1)
-
-    # Get prediction probabilities and labels
-    probs = predictions[0].tolist()
-    labels = model.config.id2label
-
-    # Create formatted output
-    results = {labels[i]: float(probs[i]) for i in range(len(probs))}
-
-    return results
+# Inference function
+def classify_text(input_text):
+    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True)
+    outputs = model(**inputs)
+    probabilities = outputs.logits.softmax(dim=-1).tolist()[0]
+    labels = {i: f"Label {i}" for i in range(len(probabilities))}  # Define label mapping if needed
+    result = {labels[i]: prob for i, prob in enumerate(probabilities)}
+    return result

-#
-
-    fn=
-    inputs=
-    outputs=
-    title="
-    description=
-    examples=["Example text to try"]
+# Gradio interface
+interface = gr.Interface(
+    fn=classify_text,
+    inputs="text",
+    outputs="label",
+    title="DeepSeek-R1 Text Classification",
+    description="Classify text inputs using the DeepSeek-R1 model."
 )

-# Launch the
-
+# Launch the app
+if __name__ == "__main__":
+    interface.launch()
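The rewritten classify_text reports generic "Label {i}" names, with a comment noting that a real label mapping can be plugged in. A minimal sketch of how the checkpoint's own names could be reused through model.config.id2label, which the removed version of app.py relied on; the function name classify_with_config_labels and the fallback behavior are illustrative, not part of this commit:

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

model_name = "cheberle/autotrain-35swc-b4r9z"  # same checkpoint as app.py
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)

def classify_with_config_labels(input_text):
    inputs = tokenizer(input_text, return_tensors="pt", padding=True, truncation=True)
    with torch.no_grad():  # inference only, as in the removed version of app.py
        outputs = model(**inputs)
    probabilities = outputs.logits.softmax(dim=-1).tolist()[0]
    # Prefer the id2label mapping stored in the model config when it is present;
    # otherwise fall back to the generic names used in the committed code.
    id2label = model.config.id2label or {i: f"Label {i}" for i in range(len(probabilities))}
    return {id2label[i]: float(prob) for i, prob in enumerate(probabilities)}

Since the returned dict has the same shape as classify_text's output, passing classify_with_config_labels as fn to gr.Interface would leave the rest of the app unchanged.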