"""Gradio demo: answer math questions with the DeepSeek-V2-Lite causal LM."""
import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM

# Model checkpoint on the Hugging Face Hub.
MODEL_NAME = "deepseek-ai/DeepSeek-V2-Lite"

# DeepSeek-V2 ships custom modeling code on the Hub, so loading it requires
# trust_remote_code=True (without it, from_pretrained refuses the checkpoint).
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(MODEL_NAME, trust_remote_code=True)


def math_inference(input_text: str) -> str:
    """Run greedy generation on *input_text* and return the decoded completion.

    Args:
        input_text: The user's math question, passed verbatim to the tokenizer.

    Returns:
        The full decoded sequence (prompt + continuation) with special tokens
        stripped.
    """
    inputs = tokenizer(input_text, return_tensors="pt")
    # Bound generation length explicitly: generate()'s 20-token default
    # silently truncates any non-trivial answer.
    output = model.generate(**inputs, max_new_tokens=256)
    return tokenizer.decode(output[0], skip_special_tokens=True)


# Fixes vs. the original wiring:
#  * gr.Textbox has no `prompt` keyword — the display caption is `label`.
#  * gr.Interface no longer accepts a `layout` argument (removed in Gradio 3+).
iface = gr.Interface(
    fn=math_inference,
    inputs=gr.Textbox(label="Input math question"),
    outputs=gr.Textbox(label="Math answer"),
    title="Math Solver",
)

# Guard the server launch so importing this module doesn't start Gradio.
if __name__ == "__main__":
    iface.launch()