import gradio as gr
import torch
from transformers import AutoConfig, AutoModel, AutoTokenizer
from transformers.utils import logging
# Enable INFO-level logging from transformers
logging.set_verbosity_info()
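# (INFO logs surface the remote code downloads and checkpoint loading steps below)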
# Load tokenizer, custom configuration, and model. With trust_remote_code=True the
# Auto classes resolve to the DeepseekV3 classes defined by the checkpoint's
# configuration_deepseek.py and modeling_deepseek.py, which is more reliable than
# importing the cached transformers_modules path by hand.
model_name = "deepseek-ai/DeepSeek-R1"
tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
config = AutoConfig.from_pretrained(model_name, trust_remote_code=True)
model = AutoModel.from_pretrained(model_name, config=config, trust_remote_code=True)
model.eval()  # inference only: disables dropout
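# Note: the bare DeepseekV3 backbone returns hidden states, not classification
# logits. The linear head below is a randomly initialised placeholder
# (assumption: 2 classes) so the demo produces class probabilities end to end;
# a real classifier would load or fine-tune task-specific head weights.
num_classes = 2
classifier_head = torch.nn.Linear(config.hidden_size, num_classes)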
def classify_text(input_text):
    # Tokenize input
    inputs = tokenizer(input_text, return_tensors="pt")
    # Run the model without tracking gradients (inference only)
    with torch.no_grad():
        outputs = model(**inputs)
        # Mean-pool token embeddings into one sentence vector, then classify
        pooled = outputs.last_hidden_state.mean(dim=1).float()
        probabilities = classifier_head(pooled).softmax(dim=-1)[0]
    return {f"Class {i}": float(prob) for i, prob in enumerate(probabilities)}
# Create Gradio interface
interface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter Text"),
    outputs=gr.Label(label="Class Probabilities"),
    title="DeepSeek-R1 Text Classification",
    description="A text classification app powered by DeepSeek-R1.",
)
# Launch the app
interface.launch()
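# To try it out (assuming this script is saved as app.py and the packages above
# are installed): run `python app.py`, then open the local URL Gradio prints
# (http://127.0.0.1:7860 by default).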