|
"""
AnyCoder AI — static-first UI wrapper.

Loads HTML/CSS/JS from the /static folder and exposes /run/predict for
the front-end to call.

• static/index.html  dark-themed UI
• static/style.css   styles
• static/index.js    JS logic (model list, fetch /run/predict)

Back-end helpers (models.py, inference.py, plugins.py, …) are unchanged.
"""
|
|
|
from pathlib import Path |
|
from typing import List, Tuple |
|
|
|
import gradio as gr |
|
|
|
|
|
from inference import chat_completion |
|
from tavily_search import enhance_query_with_search |
|
from utils import ( |
|
extract_text_from_file, |
|
extract_website_content, |
|
history_to_messages, |
|
history_to_chatbot_messages, |
|
apply_search_replace_changes, |
|
remove_code_block, |
|
parse_transformers_js_output, |
|
format_transformers_js_output, |
|
) |
|
from models import AVAILABLE_MODELS, find_model, ModelInfo |
|
|
|
|
|
# Per-language system prompts sent to the model. Languages without an
# entry here fall back to a generic "expert {language} developer" prompt
# (see generate()).
SYSTEM_PROMPTS = {

    "html": (

        "ONLY USE HTML, CSS AND JAVASCRIPT. Return **one** HTML file "

        "wrapped in ```html```."

    ),

    "transformers.js": (

        "Generate THREE fenced blocks: index.html, index.js, style.css."

    ),

}



# Chat history as a list of (user_prompt, generated_code) pairs.
History = List[Tuple[str, str]]
|
|
|
|
|
|
|
|
|
def generate(
    prompt: str,
    file_path: str | None,
    website_url: str | None,
    model_id: str,
    language: str,
    enable_search: bool,
    history: History | None,
):
    """Run one code-generation turn and append it to the chat history.

    Parameters
    ----------
    prompt:
        The user's instruction text (may arrive as ``None`` from Gradio).
    file_path:
        Optional path to an uploaded file used as extra context.
    website_url:
        Optional URL whose extracted content is used as extra context.
    model_id:
        Model identifier; falls back to ``AVAILABLE_MODELS[0]`` when unknown.
    language:
        Target output language; selects the system prompt.
    enable_search:
        When True, the query is enhanced with web-search results.
    history:
        Previous ``(prompt, code)`` pairs; a fresh list is used when falsy.

    Returns
    -------
    tuple
        ``(code, history)`` — the generated code string and the updated
        history (the input list is mutated in place when provided).
    """
    history = history or []

    sys_prompt = SYSTEM_PROMPTS.get(language, f"You are an expert {language} developer.")
    msgs = history_to_messages(history, sys_prompt)

    # Gradio can deliver None for an empty textbox; guard before .strip()
    # (the original raised AttributeError in that case).
    parts = [(prompt or "").strip()]

    if file_path:
        # Cap file context to keep the request within model limits.
        parts.append(extract_text_from_file(file_path)[:5_000])

    if website_url:
        html = extract_website_content(website_url)
        # extract_website_content signals failure with an "Error..." string.
        if not html.startswith("Error"):
            parts.append(html[:8_000])

    user_query = enhance_query_with_search("\n\n".join(filter(None, parts)), enable_search)
    msgs.append({"role": "user", "content": user_query})

    model: ModelInfo = find_model(model_id) or AVAILABLE_MODELS[0]
    reply = chat_completion(model.id, msgs)

    if language == "transformers.js":
        # Multi-file output: parse and re-assemble the three fenced blocks.
        files = parse_transformers_js_output(reply)
        code = format_transformers_js_output(files)
    else:
        cleaned = remove_code_block(reply)
        # Follow-up turns: apply search/replace edits against the previous
        # code, unless the last turn recorded an error ("❌...") result.
        if history and not history[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(history[-1][1], cleaned)
        code = cleaned

    history.append((prompt, code))
    return code, history
|
|
|
|
|
|
|
|
|
|
|
# Load the static front-end once at import time.
# FIX: the original passed encoding="utf‑8" with a non-breaking hyphen
# (U+2011) instead of ASCII "-", which raises
# LookupError: unknown encoding at runtime.
HTML_SOURCE = Path("static/index.html").read_text(encoding="utf-8")
|
|
|
# Build the Gradio app. The visible UI comes entirely from the static
# index.html injected below; the components that follow are hidden and
# exist only so the static front-end's JS can call generate() via the
# /run/predict endpoint.
with gr.Blocks(css="body{margin:0}", title="AnyCoder AI") as demo:

    # Render the static front-end as raw HTML.
    gr.HTML(HTML_SOURCE)

    # Hidden inputs — mirrored by the payload the front-end JS sends.
    prompt_in = gr.Textbox(visible=False)

    file_in = gr.File(visible=False)

    url_in = gr.Textbox(visible=False)

    model_in = gr.Textbox(visible=False)

    lang_in = gr.Textbox(visible=False)

    search_in = gr.Checkbox(visible=False)

    hist_state = gr.State([])

    # Hidden outputs — generated code plus the updated history state.
    code_out = gr.Textbox(visible=False)

    hist_out = gr.State([])

    # Hidden button whose click handler registers generate() under the
    # named API endpoint ("predict" → /run/predict) used by the JS.
    dummy_btn = gr.Button(visible=False)

    dummy_btn.click(

        fn=generate,

        inputs=[prompt_in, file_in, url_in, model_in, lang_in, search_in, hist_state],

        outputs=[code_out, hist_out],

        api_name="predict",

        queue=True,

    )
|
|
|
if __name__ == "__main__":
    # Enable the request queue, then start the local server.
    # (Blocks.queue() returns the app itself, so this is equivalent to
    # the chained demo.queue().launch().)
    app = demo.queue()
    app.launch()
|
|