File size: 3,769 Bytes
ea998ae
583310d
ea998ae
 
 
d8cd951
ea998ae
d8cd951
8d36c79
1c75fd0
583310d
1c75fd0
8d36c79
 
 
 
ea998ae
 
 
 
256b0b9
6dcd973
1c75fd0
ea998ae
 
1c75fd0
 
 
ea998ae
 
 
 
 
 
 
 
 
d8cd951
ea998ae
 
1c75fd0
ea998ae
1c75fd0
ea998ae
1c75fd0
ea998ae
 
 
1c75fd0
ea998ae
 
 
1c75fd0
d8cd951
 
 
583310d
d8cd951
 
256b0b9
d8cd951
1c75fd0
 
d8cd951
 
 
 
 
ea998ae
 
d8cd951
 
ea998ae
 
 
 
 
 
 
 
 
 
d8cd951
 
ea998ae
 
 
 
 
d8cd951
583310d
f7cf3be
2deb7a7
256b0b9
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
# app.py  ── root of the repo
"""
AnyCoder / Shasha AI – Gradio back-end
• Hosts the custom HTML/JS/CSS in /static
• Exposes POST /run/predict for the browser-side fetch()
"""
from __future__ import annotations
from pathlib import Path
from typing import List, Tuple

import gradio as gr

from inference       import chat_completion
from tavily_search   import enhance_query_with_search
from models          import AVAILABLE_MODELS, find_model, ModelInfo
from utils           import (
    extract_text_from_file, extract_website_content,
    history_to_messages, history_to_chatbot_messages,
    apply_search_replace_changes, remove_code_block,
    parse_transformers_js_output, format_transformers_js_output,
)

# System prompts keyed by target language; each value is sent verbatim to the
# model, so the text must be clean ASCII/UTF-8 (the original contained a
# double-decoded narrow no-break space: "&β€―JS").
SYSTEM_PROMPTS = {
    "html": "ONLY USE HTML, CSS & JS. Return ONE file wrapped in ```html```.",
    "transformers.js": "Generate THREE files (index.html / index.js / style.css) as fenced blocks.",
}

# Chat history shape: list of (user_prompt, generated_code) pairs.
History = List[Tuple[str, str]]

# ─────────────────────────────────────────────────────────────────────────────
def generate(prompt: str,
             file_path: str | None,
             website_url: str | None,
             model_id: str,
             language: str,
             enable_search: bool,
             history: History | None) -> Tuple[str, History]:
    """Generate code for *prompt* and append the exchange to *history*.

    Invoked by the JS front-end via POST /run/predict.

    Args:
        prompt: The user's request text (may arrive as None from fetch()).
        file_path: Optional uploaded file whose extracted text is added as context.
        website_url: Optional URL whose scraped HTML is added as context.
        model_id: Model identifier; unknown ids fall back to AVAILABLE_MODELS[0].
        language: Target language; selects the system prompt.
        enable_search: Whether to enrich the query with web-search results.
        history: Prior (prompt, code) pairs, or None for a fresh session.

    Returns:
        (code, history) — *history* is mutated in place and returned.
    """
    history = history or []
    sys_prompt = SYSTEM_PROMPTS.get(language, f"You are an expert {language} developer.")
    messages = history_to_messages(history, sys_prompt)

    # Assemble the user turn: prompt plus optional file / website context.
    # (prompt or "") guards against a JSON null posted by the browser,
    # which would otherwise crash on .strip().
    ctx: list[str] = [(prompt or "").strip()]
    if file_path:
        # Cap file context to keep the request within model limits.
        ctx.append("[File]\n" + extract_text_from_file(file_path)[:5_000])
    if website_url:
        html = extract_website_content(website_url)
        # Helper signals failure via a string starting with "Error".
        if not html.startswith("Error"):
            ctx.append("[Website]\n" + html[:8_000])

    user_q = "\n\n".join(filter(None, ctx))
    user_q = enhance_query_with_search(user_q, enable_search)
    messages.append({"role": "user", "content": user_q})

    model: ModelInfo = find_model(model_id) or AVAILABLE_MODELS[0]
    answer = chat_completion(model.id, messages)

    if language == "transformers.js":
        # Multi-file output: parse the three fenced blocks and re-serialize.
        files = parse_transformers_js_output(answer)
        code = format_transformers_js_output(files)
    else:
        cleaned = remove_code_block(answer)
        # Follow-up turns may return search/replace diffs; apply them against
        # the previous answer unless it was an error marker ("❌...").
        if history and not history[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(history[-1][1], cleaned)
        code = cleaned

    history.append((prompt, code))
    return code, history

# ─────────────────────────────────────────────────────────────────────────────
# Serve the hand-written front-end; Gradio only provides the /run/predict API.
HTML_SOURCE = Path("static/index.html").read_text(encoding="utf-8")

# Title fixed from mojibake "AnyCoderΒ AI" (double-decoded NBSP) to "AnyCoder AI".
with gr.Blocks(css="body{margin:0}", title="AnyCoder AI") as demo:
    gr.HTML(HTML_SOURCE)                            # the whole UI
    # Hidden I/O components: the browser-side fetch() posts values into these
    # instead of using Gradio's own widgets.
    with gr.Group(visible=False):
        prompt_in  = gr.Textbox()
        file_in    = gr.File()
        url_in     = gr.Textbox()
        model_in   = gr.Textbox()
        lang_in    = gr.Textbox()
        search_in  = gr.Checkbox()
        hist_state = gr.State([])
        code_out, hist_out = gr.Textbox(), gr.State([])

    # Invisible button wires generate() to POST /run/predict via api_name.
    gr.Button(visible=False).click(
        generate,
        [prompt_in, file_in, url_in,
         model_in, lang_in, search_in, hist_state],
        [code_out, hist_out],
        api_name="predict",
    )

if __name__ == "__main__":
    demo.queue().launch()   # queue() enables concurrent request handling