# Source: Hugging Face Space "builder" — app.py by mgbam, revision 49d4630 (7.37 kB).
"""
app.py – AnyCoder AI (Gradio)
* Logo: assets/logo.png
* Models: full list from constants.AVAILABLE_MODELS
* No height= arg on gr.Code (Gradio ≥5)
"""
from __future__ import annotations
import gradio as gr
from typing import List, Tuple, Dict, Optional
# ── local modules ──────────────────────────────────────────────────────────
from constants import (
HTML_SYSTEM_PROMPT, HTML_SYSTEM_PROMPT_WITH_SEARCH,
TRANSFORMERS_JS_SYSTEM_PROMPT, TRANSFORMERS_JS_SYSTEM_PROMPT_WITH_SEARCH,
GENERIC_SYSTEM_PROMPT, GENERIC_SYSTEM_PROMPT_WITH_SEARCH,
TransformersJSFollowUpSystemPrompt, FollowUpSystemPrompt,
AVAILABLE_MODELS, DEMO_LIST, get_gradio_language,
)
from hf_client import get_inference_client
from tavily_search import enhance_query_with_search
from utils import (
extract_text_from_file, extract_website_content,
history_to_messages, history_to_chatbot_messages,
remove_code_block, parse_transformers_js_output,
format_transformers_js_output,
)
from search_replace import ( # <-- moved here
apply_search_replace_changes,
apply_transformers_js_search_replace_changes,
)
from deploy import send_to_sandbox
# ── aliases ────────────────────────────────────────────────────────────────
# Chat history: list of (user_message, assistant_response) pairs.
History = List[Tuple[str, str]]
# One entry of AVAILABLE_MODELS; code below reads the "name" and "id" keys.
Model = Dict[str, str]
# ── code generation core ───────────────────────────────────────────────────
def generate_code(
    prompt: str,
    file_path: Optional[str],
    website_url: Optional[str],
    model: Model,
    language: str,
    enable_search: bool,
    history: Optional[History],
):
    """Generate (or iteratively modify) code with the selected model.

    Parameters
    ----------
    prompt:
        The user's natural-language request (may be empty).
    file_path:
        Optional uploaded file; its extracted text (truncated) is appended
        to the prompt as context.
    website_url:
        Optional URL; its scraped content (truncated) is appended as context.
    model:
        Entry from ``AVAILABLE_MODELS`` — the ``"id"`` key selects the
        inference backend.
    language:
        Target output language (special-cased: ``"html"``,
        ``"transformers.js"``; anything else uses the generic prompt).
    enable_search:
        When True, use the ``*_WITH_SEARCH`` system prompts and enhance the
        user query via Tavily.
    history:
        Prior (user, assistant) turns; ``None`` is treated as empty.

    Returns
    -------
    tuple
        ``(code, history, preview_html, chatbot_messages)``.
    """
    history = history or []
    prompt = prompt or ""

    # Follow-up turns switch to the search/replace-style prompts; first
    # turns get a language-specific generation prompt.
    if history:
        system_prompt = (
            TransformersJSFollowUpSystemPrompt if language == "transformers.js"
            else FollowUpSystemPrompt
        )
    elif language == "html":
        system_prompt = HTML_SYSTEM_PROMPT_WITH_SEARCH if enable_search else HTML_SYSTEM_PROMPT
    elif language == "transformers.js":
        system_prompt = TRANSFORMERS_JS_SYSTEM_PROMPT_WITH_SEARCH if enable_search else TRANSFORMERS_JS_SYSTEM_PROMPT
    else:
        system_prompt = (
            GENERIC_SYSTEM_PROMPT_WITH_SEARCH.format(language=language)
            if enable_search else GENERIC_SYSTEM_PROMPT.format(language=language)
        )

    messages = history_to_messages(history, system_prompt)

    # Attach truncated file / website context so oversized inputs cannot
    # blow the model's context window.
    if file_path:
        prompt += f"\n\n[File]\n{extract_text_from_file(file_path)[:5000]}"
    if website_url:
        prompt += f"\n\n[Website]\n{extract_website_content(website_url)[:8000]}"
    messages.append({"role": "user", "content": enhance_query_with_search(prompt, enable_search)})

    # Call the model; surface any failure in the chat instead of crashing
    # the UI (the error text becomes the assistant turn).
    client = get_inference_client(model["id"])
    try:
        resp = client.chat.completions.create(
            model=model["id"],
            messages=messages,
            max_tokens=16000,
            temperature=0.1,
        )
        answer = resp.choices[0].message.content
    except Exception as e:
        err = f"❌ **Error:**\n```\n{e}\n```"
        history.append((prompt, err))
        return err, history, "", history_to_chatbot_messages(history)

    # Post-process the raw answer into (code, preview).
    if language == "transformers.js":
        files = parse_transformers_js_output(answer)
        code = format_transformers_js_output(files)
        # BUGFIX: .get() avoids a KeyError when the model output lacks an
        # index.html section (the original indexed files["index.html"]).
        index_html = files.get("index.html") or ""
        preview = send_to_sandbox(index_html) if index_html else ""
        # NOTE(review): apply_transformers_js_search_replace_changes is
        # imported but never applied on follow-up turns — confirm whether
        # transformers.js follow-ups should merge edits like the HTML path.
    else:
        clean = remove_code_block(answer)
        # On follow-up turns, merge the model's search/replace edits into the
        # previous output — unless the last turn was an error message.
        if history and not history[-1][1].startswith("❌"):
            clean = apply_search_replace_changes(history[-1][1], clean)
        code = clean
        preview = send_to_sandbox(code) if language == "html" else ""

    history.append((prompt, code))
    return code, history, preview, history_to_chatbot_messages(history)
# ── UI ─────────────────────────────────────────────────────────────────────
theme = gr.themes.Base(primary_hue="indigo", font="Inter")

with gr.Blocks(theme=theme, title="AnyCoder AI") as demo:
    # Per-session state: chat history and the currently selected model dict.
    st_hist = gr.State([])
    st_model = gr.State(AVAILABLE_MODELS[0])

    # header with logo
    gr.HTML(
        '<div style="text-align:center;margin:1rem 0;">'
        '<img src="assets/logo.png" alt="logo" style="width:120px;"><br>'
        '<h1 style="margin:0.4rem 0 0">AnyCoder AI</h1>'
        '<p style="color:#555">Your AI partner for generating, modifying &amp; understanding code.</p>'
        '</div>'
    )

    with gr.Row():
        # Left column: model choice, context inputs, and output options.
        with gr.Column(scale=1):
            gr.Markdown("### 1 · Model")
            # Dropdown shows display names; the change callback below maps
            # the name back to its full model dict.
            dd_model = gr.Dropdown([m["name"] for m in AVAILABLE_MODELS],
                                   value=AVAILABLE_MODELS[0]["name"],
                                   label="AI Model")
            gr.Markdown("### 2 · Context")
            with gr.Tabs():
                with gr.Tab("Prompt"):
                    tb_prompt = gr.Textbox(lines=6, placeholder="Describe what you want…")
                with gr.Tab("File"):
                    fi_file = gr.File()
                with gr.Tab("Website"):
                    tb_url = gr.Textbox(placeholder="https://example.com")
            gr.Markdown("### 3 · Output")
            dd_lang = gr.Dropdown(
                # NOTE(review): this pulls the supported-language list out of
                # get_gradio_language's default argument via __defaults__ —
                # fragile introspection that breaks if the helper's signature
                # changes; consider exporting an explicit list from constants.
                [l for l in get_gradio_language.__defaults__[0] if l],  # supported list
                value="html",
                label="Target language"
            )
            cb_search = gr.Checkbox(label="Enable Tavily Web Search")
            with gr.Row():
                btn_clear = gr.Button("Clear", variant="secondary")
                btn_gen = gr.Button("Generate Code", variant="primary")
        # Right column: generated code, live HTML preview, and chat history.
        with gr.Column(scale=2):
            with gr.Tabs():
                with gr.Tab("Code"):
                    # No height= arg: removed in Gradio >=5 (see module docstring).
                    code_out = gr.Code(language="html", lines=25)
                with gr.Tab("Preview"):
                    html_prev = gr.HTML()
                with gr.Tab("History"):
                    chat_out = gr.Chatbot(type="messages", height=400)

    # quick demos — each button fills the prompt box with a canned description.
    # The description is bound as a lambda default to sidestep Python's
    # late-binding-closure pitfall inside the loop.
    gr.Markdown("#### Quick Start")
    with gr.Row():
        for d in DEMO_LIST[:6]:
            gr.Button(d["title"], size="sm").click(
                lambda desc=d["description"]: desc, outputs=tb_prompt
            )

    # callbacks
    # Map the selected display name back to its model dict for st_model.
    dd_model.change(lambda n: next(m for m in AVAILABLE_MODELS if m["name"] == n),
                    dd_model, st_model)
    btn_gen.click(
        generate_code,
        inputs=[tb_prompt, fi_file, tb_url, st_model, dd_lang, cb_search, st_hist],
        outputs=[code_out, st_hist, html_prev, chat_out],
    )
    # Reset every input/output widget plus the stored history in one shot;
    # queue=False so the clear feels instant.
    btn_clear.click(
        lambda: ("", None, "", [], [], "", ""),
        outputs=[tb_prompt, fi_file, tb_url, st_hist, chat_out, code_out, html_prev],
        queue=False,
    )

if __name__ == "__main__":
    demo.launch()