import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM
import torch

# Load the DeepSeek Coder base model and its tokenizer
model_name = "deepseek-ai/deepseek-coder-1.3b-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_code(prompt):
    # Tokenize the prompt and move it to the same device as the model
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=300,
        pad_token_id=tokenizer.eos_token_id
    )
    # Decode the generated tokens (prompt + completion) back into text
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

examples = [
    "Create a Python function to reverse a string.",
    "Write a JavaScript function that returns the factorial of a number.",
    "Build a simple HTML page with a form and a submit button.",
    "Create a Python script to fetch weather data using an API."
]

# Build the Gradio interface: a prompt box, an output box, and a Generate button
with gr.Blocks() as demo:
    gr.Markdown("## 💻 Generate Code with DeepSeek")
    prompt = gr.Textbox(label="Enter your prompt", lines=4, scale=2)
    output = gr.Textbox(label="Generated code", lines=10)
    gen_button = gr.Button("Generate")
    gen_button.click(fn=generate_code, inputs=prompt, outputs=output)
    gr.Examples(examples=examples, inputs=prompt)

demo.launch()
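Before launching the UI, you can sanity-check the generation function directly in a Python session. This is a minimal sketch assuming the model, tokenizer, and generate_code defined above have already loaded; the prompt is taken from the examples list.

# Sanity check (assumes model, tokenizer, and generate_code above are defined):
sample = generate_code("Create a Python function to reverse a string.")
print(sample)  # prints the prompt followed by the model's completion

Because deepseek-coder-1.3b-base is a base (non-instruct) causal model, the decoded output includes the original prompt, and the completion continues from it rather than answering like a chat assistant.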