|
""" |
|
AnyCoder / Shasha AI – Gradio back‑end |
|
|
|
• Serves the custom front‑end shipped in index.html (+ static/style.css & static/index.js). |
|
• Exposes one JSON endpoint (`POST /run/predict`) that the JS front‑end |
|
calls to run model inference. |
|
""" |
|
|
|
from pathlib import Path |
|
from typing import List, Tuple |
|
|
|
import gradio as gr |
|
|
|
|
|
from inference import chat_completion |
|
from tavily_search import enhance_query_with_search |
|
from deploy import send_to_sandbox |
|
from models import AVAILABLE_MODELS, find_model, ModelInfo |
|
from utils import ( |
|
extract_text_from_file, |
|
extract_website_content, |
|
history_to_messages, |
|
history_to_chatbot_messages, |
|
apply_search_replace_changes, |
|
remove_code_block, |
|
parse_transformers_js_output, |
|
format_transformers_js_output, |
|
) |
|
|
|
|
|
# Language-specific system prompts sent to the model.  Languages without an
# entry here fall back to a generic "expert developer" prompt in generate().
_HTML_PROMPT = (
    "ONLY USE HTML, CSS AND JAVASCRIPT. Return ONE html file "
    "wrapped in ```html ...```."
)
_TRANSFORMERS_JS_PROMPT = (
    "Generate THREE separate files (index.html / index.js / style.css) "
    "as three fenced blocks."
)

SYSTEM_PROMPTS = {
    "html": _HTML_PROMPT,
    "transformers.js": _TRANSFORMERS_JS_PROMPT,
}

# A chat history is a list of (user_prompt, generated_code) pairs.
History = List[Tuple[str, str]]
|
|
|
|
|
def generate(
    prompt: str,
    file_path: str | None,
    website_url: str | None,
    model_id: str,
    language: str,
    enable_search: bool,
    history: History | None,
) -> Tuple[str, History]:
    """Run one inference round for the JS front-end (POST /run/predict).

    Builds the chat context from the prompt plus optional file / website
    extracts, queries the selected model, post-processes the answer into
    code, and returns it together with the updated history.
    """
    history = history or []

    fallback = f"You are an expert {language} developer."
    messages = history_to_messages(history, SYSTEM_PROMPTS.get(language, fallback))

    # Assemble the user query: prompt first, then optional context extracts
    # (truncated so a huge file or page cannot blow up the context window).
    segments: list[str] = [prompt.strip()]
    if file_path:
        segments += ["[File]", extract_text_from_file(file_path)[:5000]]
    if website_url:
        page = extract_website_content(website_url)
        if not page.startswith("Error"):
            segments += ["[Website]", page[:8000]]

    query = "\n\n".join(s for s in segments if s)
    query = enhance_query_with_search(query, enable_search)
    messages.append({"role": "user", "content": query})

    # Unknown model ids silently fall back to the first available model.
    chosen: ModelInfo = find_model(model_id) or AVAILABLE_MODELS[0]
    answer = chat_completion(chosen.id, messages)

    if language == "transformers.js":
        # Three fenced files expected; reassemble them into one payload.
        code = format_transformers_js_output(parse_transformers_js_output(answer))
    else:
        code = remove_code_block(answer)
        if history:
            # Follow-up turns may return search/replace diffs against the
            # previous output; apply them unless that output was an error.
            previous = history[-1][1]
            if not previous.startswith("❌"):
                code = apply_search_replace_changes(previous, code)

    history.append((prompt, code))
    return code, history
|
|
|
|
|
# Custom front-end markup, injected verbatim into the Gradio page.
# BUG FIX: the encoding name must be the ASCII "utf-8" — the original used
# a non-breaking hyphen (U+2011, "utf‑8"), which is not a registered codec
# name and raises LookupError the moment the module is imported.
HTML_SOURCE = Path("index.html").read_text(encoding="utf-8")
|
|
|
|
|
# Gradio UI.  The visible part is only the custom front-end HTML; the real
# work happens through the hidden "api" group below, which exposes
# generate() as the named endpoint (POST /run/predict) that static/index.js
# calls.  NOTE(review): component creation order matters — the JS front-end
# addresses these components by position, so do not reorder them.
with gr.Blocks(css="body{margin:0}", title="AnyCoder AI") as demo:

    # Custom front-end (index.html + static assets) rendered as-is.
    gr.HTML(HTML_SOURCE)

    # Hidden components that define the /run/predict input signature.
    # They are never shown; the JS front-end supplies their values directly.
    with gr.Group(visible=False) as api:
        prompt_in = gr.Textbox()    # user prompt text
        file_in = gr.File()         # optional uploaded file path
        url_in = gr.Textbox()       # optional website URL to scrape
        model_in = gr.Textbox()     # model identifier
        lang_in = gr.Textbox()      # target language key (see SYSTEM_PROMPTS)
        search_in = gr.Checkbox()   # enable web-search query enrichment
        hist_state = gr.State([])   # chat history, list of (prompt, code)

    # Outputs: the generated code and the updated history.
    code_out, hist_out = gr.Textbox(), gr.State([])

    # Hidden trigger wired to generate(); api_name="predict" registers the
    # JSON endpoint the front-end posts to.
    api_btn = gr.Button(visible=False)
    api_btn.click(
        fn=generate,
        inputs=[
            prompt_in, file_in, url_in,
            model_in, lang_in, search_in, hist_state
        ],
        outputs=[code_out, hist_out],
        api_name="predict",
    )


if __name__ == "__main__":
    # queue() enables request queuing so long generations are not dropped.
    demo.queue().launch()
|
|