# CodeComment / app.py
from transformers import AutoModelForCausalLM, AutoTokenizer
import gradio as gr
import torch
# Load a smaller model that fits within 16GB RAM
model_name = "deepseek-ai/deepseek-coder-1.3b-instruct"
# Load tokenizer
tokenizer = AutoTokenizer.from_pretrained(model_name)
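# Some tokenizers ship without a pad token; falling back to the EOS token is a common,
# safe default and avoids padding warnings from generate(). (Defensive addition; the
# deepseek-coder tokenizer may already define one.)
if tokenizer.pad_token is None:
    tokenizer.pad_token = tokenizer.eos_token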
# Load the model for CPU-only inference (full float32, since half precision is poorly supported on CPU)
model = AutoModelForCausalLM.from_pretrained(
    model_name,
    torch_dtype=torch.float32,  # Use float32 since CPU-only
    device_map="cpu",           # Ensure it runs only on CPU
)
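# Sketch of an alternative (not used here): on a GPU-backed Space the same model could be
# loaded in half precision to reduce memory use, assuming `accelerate` is installed:
#   model = AutoModelForCausalLM.from_pretrained(
#       model_name, torch_dtype=torch.float16, device_map="auto"
#   )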
# Function to generate comments for a code snippet
def generate_code_comments(code_snippet):
    prompt = f"### Code:\n{code_snippet}\n### Add meaningful comments to this code:\n"
    inputs = tokenizer(prompt, return_tensors="pt", padding=True, truncation=True, max_length=512)
    # max_new_tokens bounds the completion itself; max_length would also count the prompt tokens
    outputs = model.generate(**inputs, max_new_tokens=512)
    # Decode only the newly generated tokens so the prompt is not echoed back to the user
    prompt_length = inputs["input_ids"].shape[1]
    commented_code = tokenizer.decode(outputs[0][prompt_length:], skip_special_tokens=True)
    return commented_code
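# Quick local sanity check (hypothetical example, not executed by the Space):
#   sample = "def add(a, b):\n    return a + b"
#   print(generate_code_comments(sample))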
# Create Gradio interface
iface = gr.Interface(
    fn=generate_code_comments,
    inputs="text",
    outputs="text",
    title="AI Code Comment Generator",
    description="Enter a code snippet, and the AI will add meaningful comments.",
)
iface.launch()