"""
AnyCoder AI — static‑first UI wrapper
Loads HTML/CSS/JS from the /static folder and exposes /run/predict for
the front‑end to call.
• static/index.html dark themed UI
• static/style.css styles
• static/index.js JS logic (model list, fetch /run/predict)
Back‑end helpers (models.py, inference.py, plugins.py …) are unchanged.
"""
from pathlib import Path
from typing import List, Tuple
import gradio as gr
# ---------- imports that actually do the work ----------
from inference import chat_completion # runs the model
from tavily_search import enhance_query_with_search
from utils import (  # misc helpers
    extract_text_from_file,
    extract_website_content,
    history_to_messages,
    history_to_chatbot_messages,
    apply_search_replace_changes,
    remove_code_block,
    parse_transformers_js_output,
    format_transformers_js_output,
)
from models import AVAILABLE_MODELS, find_model, ModelInfo
# -------------------------------------------------------
SYSTEM_PROMPTS = {
    "html": (
        "ONLY USE HTML, CSS AND JAVASCRIPT. Return **one** HTML file "
        "wrapped in ```html```."
    ),
    "transformers.js": (
        "Generate THREE fenced blocks: index.html, index.js, style.css."
    ),
}
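
# Expected reply shapes (a sketch; the exact fence handling lives in the utils helpers):
#   "html"            -> a single ```html ... ``` block, stripped by remove_code_block()
#   "transformers.js" -> three fenced blocks (index.html / index.js / style.css),
#                        split by parse_transformers_js_output() and recombined for
#                        display by format_transformers_js_output()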
History = List[Tuple[str, str]]
# ------------------------------------------------------------------
# /run/predict — called by static/index.js
# ------------------------------------------------------------------
def generate(
    prompt: str,
    file_path: str | None,
    website_url: str | None,
    model_id: str,
    language: str,
    enable_search: bool,
    history: History | None,
):
    history = history or []

    # 1 · system + user messages
    sys_prompt = SYSTEM_PROMPTS.get(language, f"You are an expert {language} developer.")
    msgs = history_to_messages(history, sys_prompt)

    parts = [prompt.strip()]
    if file_path:
        parts.append(extract_text_from_file(file_path)[:5_000])
    if website_url:
        html = extract_website_content(website_url)
        if not html.startswith("Error"):
            parts.append(html[:8_000])
    user_query = enhance_query_with_search("\n\n".join(filter(None, parts)), enable_search)
    msgs.append({"role": "user", "content": user_query})

    # 2 · run model
    model: ModelInfo = find_model(model_id) or AVAILABLE_MODELS[0]
    reply = chat_completion(model.id, msgs)

    # 3 · post-process
    if language == "transformers.js":
        files = parse_transformers_js_output(reply)
        code = format_transformers_js_output(files)
    else:
        cleaned = remove_code_block(reply)
        if history and not history[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(history[-1][1], cleaned)
        code = cleaned

    history.append((prompt, code))
    return code, history
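
# Quick smoke test (a sketch; assumes the back-end helpers are configured and
# AVAILABLE_MODELS is non-empty; not part of the served app):
#
#   code, history = generate(
#       prompt="Build a dark-themed landing page",
#       file_path=None,
#       website_url=None,
#       model_id=AVAILABLE_MODELS[0].id,
#       language="html",
#       enable_search=False,
#       history=[],
#   )
#   print(code[:200])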
# ------------------------------------------------------------------
# Serve static UI
# ------------------------------------------------------------------
HTML_SOURCE = Path("static/index.html").read_text(encoding="utf-8")
with gr.Blocks(css="body{margin:0}", title="AnyCoder AI") as demo:
    # Front-end
    gr.HTML(HTML_SOURCE)

    # Hidden components for API
    prompt_in = gr.Textbox(visible=False)
    file_in = gr.File(visible=False)
    url_in = gr.Textbox(visible=False)
    model_in = gr.Textbox(visible=False)
    lang_in = gr.Textbox(visible=False)
    search_in = gr.Checkbox(visible=False)
    hist_state = gr.State([])
    code_out = gr.Textbox(visible=False)
    hist_out = gr.State([])

    # Expose /run/predict
    dummy_btn = gr.Button(visible=False)
    dummy_btn.click(
        fn=generate,
        inputs=[prompt_in, file_in, url_in, model_in, lang_in, search_in, hist_state],
        outputs=[code_out, hist_out],
        api_name="predict",
        queue=True,
    )
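
    # Note: api_name="predict" is what exposes this click handler to static/index.js.
    # The exact HTTP route depends on the installed Gradio version (e.g. POST
    # /run/predict with {"data": [...]} on Gradio 3.x, or the /call/predict
    # event-id flow on 4.x), so the front-end fetch logic must match it.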
if __name__ == "__main__":
    demo.queue().launch()
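
# Local run (a sketch; assumes this file is the entry point, saved e.g. as app.py):
#   $ python app.py        # Gradio serves on http://127.0.0.1:7860 by default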