import gradio as gr
from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed, pipeline

title = "Python Code Generator"

description = (
    "This Space converts English text to Python code using the "
    "[codeparrot-small-text-to-code](https://huggingface.co/codeparrot/codeparrot-small-text-to-code) model, "
    "a Python code generation model pre-trained on pairs of docstrings and Python code extracted from "
    "Jupyter notebooks, available as the "
    "[github-jupyter-text](https://huggingface.co/datasets/codeparrot/github-jupyter-text) dataset."
)

example = [
    ["Utility function to calculate the precision of predictions using sklearn metrics", 65, 0.6, 42],
    ["Let's implement a function that calculates the size of a file called filepath", 60, 0.6, 42],
    ["Let's implement the Bubble Sort sorting algorithm in an auxiliary function:", 87, 0.6, 42],
    ["Function to calculate the nth Fibonacci number.", 65, 0.6, 42],
    ["Function to calculate the factorial of a number.", 65, 0.6, 42],
    ["Function to reverse a string.", 65, 0.6, 42],
    ["Function to check if a number is prime.", 65, 0.6, 42],
    ["Function to generate the Fibonacci sequence up to the nth term.", 65, 0.6, 42],
    ["Function to generate the factorial sequence up to the nth term.", 65, 0.6, 42],
]
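# Each row above is [prompt, max_tokens, temperature, seed], matching the order
# of the Interface inputs defined below.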

# The tokenizer and model are downloaded from the Hugging Face Hub on first run
# and cached locally afterwards.
tokenizer = AutoTokenizer.from_pretrained("codeparrot/codeparrot-small-text-to-code")
model = AutoModelForCausalLM.from_pretrained("codeparrot/codeparrot-small-text-to-code")


def create_docstring(gen_prompt):
    return "\"\"\"\n" + gen_prompt + "\n\"\"\"\n\n"


def generate_code(gen_prompt, max_tokens, temperature=0.6, seed=42):
    # Sliders may deliver floats, so cast where integers are required.
    set_seed(int(seed))
    pipe = pipeline("text-generation", model=model, tokenizer=tokenizer)
    prompt = create_docstring(gen_prompt)
    generated_text = pipe(
        prompt,
        do_sample=True,
        top_p=0.95,
        temperature=temperature,
        max_new_tokens=int(max_tokens),
    )[0]["generated_text"]
    return generated_text
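
# Note: the text-generation pipeline is rebuilt on every call. If latency matters,
# a single pipeline created once at module level could be reused instead.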


def save_to_text_file(output_text):
    with open("generated_code.txt", "w") as file:
        file.write(output_text)
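
# This helper is not wired into the Gradio interface; it simply writes the
# generated code to generated_code.txt in the current working directory.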


iface = gr.Interface(
    fn=generate_code,
    inputs=[
        gr.Textbox(label="English instructions", placeholder="Enter English instructions..."),
        gr.Slider(
            minimum=8,
            maximum=256,
            step=1,
            value=8,
            label="Number of tokens to generate",
        ),
        gr.Slider(
            minimum=0,
            maximum=2.5,
            step=0.1,
            value=0.6,
            label="Temperature",
        ),
        gr.Slider(
            minimum=0,
            maximum=1000,
            step=1,
            value=42,
            label="Random seed for generation",
        ),
    ],
    outputs=gr.Code(label="Generated Python code", language="python", lines=10),
    examples=example,
    description=description,
    title=title,
)

iface.launch()
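
# launch() serves the app locally (or inside the Space); passing share=True
# would additionally create a temporary public link.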