# app.py
"""
Shasha / AnyCoder AI – Gradio front-end + back-end
Generate or refactor code with multiple foundation models.
Supports file uploads, website scraping, optional Tavily search enrichment, and a live HTML preview.
"""
from __future__ import annotations
from typing import List, Tuple, Dict, Any, Optional
import gradio as gr
# ──────────────────────────────────────────────────────────────────────────────
# Local helpers (unchanged)
# ──────────────────────────────────────────────────────────────────────────────
from constants import (
    HTML_SYSTEM_PROMPT,
    TRANSFORMERS_JS_SYSTEM_PROMPT,
    AVAILABLE_MODELS,
)
from hf_client import get_inference_client
from tavily_search import enhance_query_with_search
from utils import (
    extract_text_from_file,
    extract_website_content,
    apply_search_replace_changes,
    history_to_messages,
    history_to_chatbot_messages,
    remove_code_block,
    parse_transformers_js_output,
    format_transformers_js_output,
)
from deploy import send_to_sandbox
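# Chat history is stored as (user_prompt, assistant_answer) tuples and converted
# to model-ready message dicts by the history_to_* helpers imported from utils.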
History = List[Tuple[str, str]]
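# Languages offered in the "Target Language" dropdown; the selection picks the
# system prompt and decides whether a live HTML preview is rendered.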
SUPPORTED_LANGUAGES = [
    "python", "c", "cpp", "markdown", "latex", "json", "html", "css",
    "javascript", "jinja2", "typescript", "yaml", "dockerfile", "shell",
    "r", "sql", "sql-msSQL", "sql-mySQL", "sql-mariaDB", "sql-sqlite",
    "sql-cassandra", "sql-plSQL", "sql-hive", "sql-pgSQL", "sql-gql",
    "sql-gpSQL", "sql-sparkSQL", "sql-esper",
]
# ──────────────────────────────────────────────────────────────────────────────
# Generation callback
# ──────────────────────────────────────────────────────────────────────────────
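# The returned 4-tuple maps, in order, onto the (code_out, state_history,
# preview, chatbox) outputs wired to the "Generate Code" button below.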
def generation_code(
    query: Optional[str],
    file_path: Optional[str],
    website_url: Optional[str],
    current_model: Dict[str, Any],
    enable_search: bool,
    language: str,
    history: History | None,
) -> Tuple[str, History, str, List[Dict[str, str]]]:
"""Core back‑end that the β€œGenerateΒ Code” button calls."""
query = (query or "").strip()
history = history or []
# Pick proper system prompt
if language == "html":
system_prompt = HTML_SYSTEM_PROMPT
elif language == "transformers.js":
system_prompt = TRANSFORMERS_JS_SYSTEM_PROMPT
else:
system_prompt = (
f"You are an expert {language} developer. "
f"Write clean, idiomatic {language} based on the user request."
)
    # Route to provider
    model_id = current_model["id"]
    if model_id.startswith(("openai/", "gpt-")):
        provider = "openai"
    elif model_id.startswith(("gemini/", "google/")):
        provider = "gemini"
    elif model_id.startswith("fireworks-ai/"):
        provider = "fireworks-ai"
    else:
        provider = "auto"

    # Build message list
    messages = history_to_messages(history, system_prompt)
    context_parts: List[str] = [query]
    if file_path:
        context_parts.append("[Attached file]")
        context_parts.append(extract_text_from_file(file_path)[:5_000])
    if website_url:
        html = extract_website_content(website_url)
        if not html.startswith("Error"):
            context_parts.append("[Website content]")
            context_parts.append(html[:8_000])
    user_msg = "\n\n".join(context_parts)
    user_msg = enhance_query_with_search(user_msg, enable_search)
    messages.append({"role": "user", "content": user_msg})
    # Call model
    try:
        client = get_inference_client(model_id, provider)
        resp = client.chat.completions.create(
            model=model_id,
            messages=messages,
            max_tokens=16_000,
            temperature=0.1,
        )
        answer = resp.choices[0].message.content
    except Exception as exc:
        err = f"❌ **Error**\n```\n{exc}\n```"
        history.append((query, err))
        return "", history, "", history_to_chatbot_messages(history)
    # Post-process
    if language == "transformers.js":
        files = parse_transformers_js_output(answer)
        code = format_transformers_js_output(files)
        preview = send_to_sandbox(files.get("index.html", ""))
    else:
        cleaned = remove_code_block(answer)
        if history and not history[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(history[-1][1], cleaned)
        code = cleaned
        preview = send_to_sandbox(code) if language == "html" else ""

    history.append((query, code))
    chat_history = history_to_chatbot_messages(history)
    return code, history, preview, chat_history
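# Quick manual smoke test (not wired into the UI): a sketch that assumes the
# first entry in AVAILABLE_MODELS is usable and its API key is configured.
#
#   code, hist, preview, chat = generation_code(
#       "Write a hello-world landing page", None, None,
#       AVAILABLE_MODELS[0], enable_search=False, language="html", history=[],
#   )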
# ──────────────────────────────────────────────────────────────────────────────
# Gradio UI
# ──────────────────────────────────────────────────────────────────────────────
CUSTOM_CSS = """
body{font-family:-apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,sans-serif;}
.gradio-container{background:#f9fafb;}
#gen-btn{box-shadow:0 2px 4px rgba(0,0,0,.08);}
"""
with gr.Blocks(theme=gr.themes.Soft(primary_hue="indigo"),
               title="AnyCoder AI",
               css=CUSTOM_CSS) as demo:
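    # Per-session state: running chat history and the currently selected model dict.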
    state_history: gr.State = gr.State([])
    state_model: gr.State = gr.State(AVAILABLE_MODELS[0])

    gr.Markdown("## 🏒 **AnyCoder AI**\nYour AI partner for generating, modifying and understanding code.",
                elem_id="header")
    # 1 · MODEL
    gr.Markdown("### 1 · Model")
    model_dd = gr.Dropdown(
        choices=[m["name"] for m in AVAILABLE_MODELS],
        value=AVAILABLE_MODELS[0]["name"],
        label="AI Model",
    )
    # 2 · CONTEXT
    gr.Markdown("### 2 · Context")
    with gr.Tabs():
        with gr.Tab("Prompt"):
            prompt_tb = gr.Textbox(lines=6, placeholder="Describe what you need…")
        with gr.Tab("File"):
            file_upl = gr.File(label="Reference file")
            import_btn = gr.Button("Import Project", variant="secondary")
            # Placeholder: project import is not implemented yet.
            import_btn.click(lambda: print("import clicked"), outputs=[])
        with gr.Tab("Website"):
            url_tb = gr.Textbox(placeholder="https://example.com")
    # 3 · OUTPUT
    gr.Markdown("### 3 · Output")
    lang_dd = gr.Dropdown(SUPPORTED_LANGUAGES, value="html", label="Target Language")
    search_ck = gr.Checkbox(label="Enable Tavily Web Search")
    with gr.Row():
        clear_btn = gr.Button("Clear Session", variant="secondary")
        gen_btn = gr.Button("Generate Code", elem_id="gen-btn")
    # OUTPUT PANELS
    with gr.Tabs():
        with gr.Tab("Code"):
            code_out = gr.Code(language="html", interactive=True, lines=25)
        with gr.Tab("Live Preview"):
            preview = gr.HTML()
        with gr.Tab("History"):
            chatbox = gr.Chatbot(type="messages")
    # ── CALLBACKS ──────────────────────────────────────────────
    model_dd.change(lambda n: next(m for m in AVAILABLE_MODELS if m["name"] == n),
                    inputs=model_dd, outputs=state_model)
    gen_btn.click(
        fn=generation_code,
        inputs=[prompt_tb, file_upl, url_tb,
                state_model, search_ck, lang_dd, state_history],
        outputs=[code_out, state_history, preview, chatbox],
    )
    clear_btn.click(
        lambda: ("", None, "", [], "", "", []),
        outputs=[prompt_tb, file_upl, url_tb, state_history,
                 code_out, preview, chatbox],
        queue=False,
    )
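# queue() enables Gradio's request queue so long-running generations don't block
# the interface; launch() starts the local web server.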
if __name__ == "__main__":
    demo.queue().launch()