# app.py ── root of the repo
"""
AnyCoder / Shasha AI – Gradio back-end
• Hosts the custom HTML/JS/CSS in /static
• Exposes POST /run/predict for the browser-side fetch()
"""
from __future__ import annotations
from pathlib import Path
from typing import List, Tuple
import gradio as gr
from inference import chat_completion
from tavily_search import enhance_query_with_search
from models import AVAILABLE_MODELS, find_model, ModelInfo
from utils import (
    extract_text_from_file, extract_website_content,
    history_to_messages, history_to_chatbot_messages,
    apply_search_replace_changes, remove_code_block,
    parse_transformers_js_output, format_transformers_js_output,
)
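# Language-specific system prompts; generate() falls back to a generic
# "expert {language} developer" prompt for any other language.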
SYSTEM_PROMPTS = {
"html": "ONLY USE HTML, CSS &β€―JS. Return ONE file wrapped in ```html```.",
"transformers.js":"Generate THREE files (index.html / index.js / style.css) as fenced blocks."
}
History = List[Tuple[str, str]]
# ─────────────────────────────────────────────────────────────────────────────
def generate(prompt: str,
             file_path: str | None,
             website_url: str | None,
             model_id: str,
             language: str,
             enable_search: bool,
             history: History | None) -> Tuple[str, History]:
"""Invoked by the JS front‑end."""
history = history or []
sys_prompt = SYSTEM_PROMPTS.get(language, f"You are an expert {language} developer.")
messages = history_to_messages(history, sys_prompt)
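    # Assemble the user query: the prompt plus optional file and website context.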
    ctx: list[str] = [prompt.strip()]
    if file_path:
        ctx.append("[File]\n" + extract_text_from_file(file_path)[:5_000])
    if website_url:
        html = extract_website_content(website_url)
        if not html.startswith("Error"):
            ctx.append("[Website]\n" + html[:8_000])
user_q = "\n\n".join(filter(None, ctx))
user_q = enhance_query_with_search(user_q, enable_search)
messages.append({"role": "user", "content": user_q})
    model: ModelInfo = find_model(model_id) or AVAILABLE_MODELS[0]
    answer = chat_completion(model.id, messages)
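    # Post-process the answer: transformers.js returns three files, everything else one block.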
    if language == "transformers.js":
        files = parse_transformers_js_output(answer)
        code = format_transformers_js_output(files)
    else:
        cleaned = remove_code_block(answer)
        # Follow-up requests: apply search/replace edits against the previous answer.
        if history and not history[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(history[-1][1], cleaned)
        code = cleaned
    history.append((prompt, code))
    return code, history
# ─────────────────────────────────────────────────────────────────────────────
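# The whole UI lives in static/index.html; Gradio only serves it and exposes the API.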
HTML_SOURCE = Path("static/index.html").read_text(encoding="utf-8")
with gr.Blocks(css="body{margin:0}", title="AnyCoder AI") as demo:
    gr.HTML(HTML_SOURCE)  # the whole UI
    # hidden I/O elements for the JS fetch()
    with gr.Group(visible=False):
        prompt_in = gr.Textbox()
        file_in = gr.File()
        url_in = gr.Textbox()
        model_in = gr.Textbox()
        lang_in = gr.Textbox()
        search_in = gr.Checkbox()
        hist_state = gr.State([])
        code_out, hist_out = gr.Textbox(), gr.State([])
    gr.Button(visible=False).click(  # POST /run/predict
        generate,
        [prompt_in, file_in, url_in,
         model_in, lang_in, search_in, hist_state],
        [code_out, hist_out],
        api_name="predict",
    )
if __name__ == "__main__":
    demo.queue().launch()