|
import gradio as gr |
|
from transformers import GPT2LMHeadModel, GPT2Tokenizer |
|
import torch |
|
|
|
|
|
# Run inference on CPU (no GPU assumed in the deployment environment).
device = torch.device("cpu")

# Load the pretrained GPT-2 code-autocompletion model and its tokenizer
# from the Hugging Face Hub (downloaded on first run, cached afterwards).
print("Cargando modelo code-autocomplete-gpt2-base...")

model_name = "shibing624/code-autocomplete-gpt2-base"

tokenizer = GPT2Tokenizer.from_pretrained(model_name)
model = GPT2LMHeadModel.from_pretrained(model_name)

model.to(device)
model.eval()  # inference only: disables dropout

# GPT-2 ships without a pad token; reuse EOS so padded encoding works.
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token
|
|
|
def autocomplete_text(input_text, max_tokens=20):
    """
    Autocomplete the input text/code using code-autocomplete-gpt2-base.

    Args:
        input_text (str): Initial text/code to complete.
        max_tokens (int): Maximum number of new tokens to generate.

    Returns:
        str: Only the newly generated continuation (without the original
        input), or a human-readable Spanish message when the input is
        empty, nothing was generated, or generation failed.
    """
    if not input_text.strip():
        return "Por favor, ingresa algún texto para completar."

    try:
        # Single sequence: no padding needed, so a plain encode suffices.
        inputs = tokenizer.encode(input_text, return_tensors="pt")
        inputs = inputs.to(device)

        with torch.no_grad():
            outputs = model.generate(
                inputs,
                max_new_tokens=max_tokens,
                num_return_sequences=1,
                temperature=0.7,
                do_sample=True,
                pad_token_id=tokenizer.eos_token_id,
                eos_token_id=tokenizer.eos_token_id,
                # All positions are real tokens (no padding), so an
                # all-ones mask is correct and silences the HF warning.
                attention_mask=torch.ones_like(inputs),
            )

        # Decode only the tokens generated *after* the prompt. Slicing the
        # decoded string with len(input_text) is fragile: the tokenizer's
        # decode of the prompt is not guaranteed to reproduce the input
        # byte-for-byte (whitespace/unicode normalization), which can
        # truncate or duplicate characters at the seam.
        new_token_ids = outputs[0][inputs.shape[1]:]
        new_text = tokenizer.decode(new_token_ids, skip_special_tokens=True).strip()

        if not new_text:
            return "No se pudo generar texto adicional."

        return new_text

    except Exception as e:
        # Surface the error in the UI instead of crashing the Gradio app.
        return f"Error al generar texto: {str(e)}"
|
|
|
def create_autocomplete_interface():
    """
    Build the Gradio Blocks UI for the code-autocompletion demo.

    Returns:
        gr.Blocks: The assembled (not yet launched) Gradio app with an
        "Autocompletar" tab (input, button, output) and an examples tab.
    """
    with gr.Blocks(title="Autocompletar Código") as demo:

        gr.Markdown("# 🤖 Autocompletar Código")
        gr.Markdown("Escribe el inicio de tu código y la IA lo completará por ti.")

        with gr.Tab("Autocompletar"):
            with gr.Row():
                with gr.Column():
                    input_textbox = gr.Textbox(
                        label="Código a completar",
                        placeholder="def fibonacci(n):",
                        lines=5,
                        max_lines=10
                    )

                    generate_btn = gr.Button("Completar Código", variant="primary")

                with gr.Column():
                    output_textbox = gr.Textbox(
                        label="Código generado",
                        placeholder="Aquí aparecerá la continuación del código...",
                        lines=5,
                        max_lines=10,
                        interactive=False  # output only; user can't edit
                    )

            # Trigger generation both from the button and from pressing
            # Enter in the input textbox.
            generate_btn.click(
                fn=autocomplete_text,
                inputs=[input_textbox],
                outputs=[output_textbox]
            )

            input_textbox.submit(
                fn=autocomplete_text,
                inputs=[input_textbox],
                outputs=[output_textbox]
            )

        with gr.Tab("Ejemplos"):
            gr.Markdown("""
            ### Ejemplos de uso:

            **Entrada:** "def fibonacci(n):"
            **Salida:** "\\n    if n <= 1:\\n        return n\\n    return fibonacci(n-1) + fibonacci(n-2)"

            **Entrada:** "for i in range("
            **Salida:** "10):\\n    print(i)"

            **Entrada:** "import pandas as pd\\ndf = pd.read_csv("
            **Salida:** "'data.csv')\\nprint(df.head())"

            **Entrada:** "class Calculator:"
            **Salida:** "\\n    def __init__(self):\\n        pass"
            """)

    return demo
|
|
|
|
|
if __name__ == "__main__":
    print("Iniciando aplicación de autocompletar código...")

    app = create_autocomplete_interface()

    # Bind on all interfaces (0.0.0.0) on the standard Gradio port so the
    # app is reachable inside a container; no public share link.
    app.launch(
        share=False,
        server_name="0.0.0.0",
        server_port=7860,
        show_error=True,  # display Python errors in the browser UI
        debug=False
    )