import gradio as gr
import torch
from transformers import AutoTokenizer, AutoModelForCausalLM

# Requires: gradio, transformers, torch, and accelerate (for device_map="auto").
model_name = "deepseek-ai/deepseek-coder-1.3b-base"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float16,
    device_map="auto",
)


def generate_code(prompt):
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(
        **inputs,
        max_new_tokens=300,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(outputs[0], skip_special_tokens=True)


examples = [
    "Write a Python function to calculate the factorial of a number.",
    "Generate a React component for a simple to-do list.",
    "Create a SQL query to find the top 5 selling products.",
    "Write a JavaScript function to validate email addresses.",
    "Generate a CSS style for a responsive navbar.",
]

with gr.Blocks() as demo:
    gr.Markdown("## 💻 DeepSeek Code Generator")
    prompt = gr.Textbox(label="Enter your code description here", lines=3)
    output = gr.Textbox(label="Generated Code", lines=15)
    generate_btn = gr.Button("Generate Code")

    # Hidden column of example prompts; clicking one fills the prompt box.
    with gr.Column(visible=False) as examples_box:
        for ex in examples:
            ex_btn = gr.Button(ex, variant="secondary", size="sm")
            # The default argument binds the current example text to this button.
            ex_btn.click(fn=lambda x=ex: x, inputs=None, outputs=prompt)

    toggle_examples_btn = gr.Button("Show Examples")

    # Track visibility in session state: reading examples_box.visible inside a
    # callback always returns the initial value, so it cannot drive the toggle.
    examples_visible = gr.State(False)

    def toggle_examples(visible):
        return gr.update(visible=not visible), not visible

    generate_btn.click(fn=generate_code, inputs=prompt, outputs=output)
    toggle_examples_btn.click(
        fn=toggle_examples,
        inputs=examples_visible,
        outputs=[examples_box, examples_visible],
    )

demo.launch()