# app.py
# ────────────────────────────────────────────────────────────────
"""
AnyCoder / Shasha AI – single‑file Gradio interface
 • Pick a model (registry in models.py)
 • Give context (prompt, file upload, or website URL)
 • Choose from 25+ target languages
 • Optional Tavily web‑search enrichment
 • Generate code, preview HTML, see conversation history
"""

from __future__ import annotations

from typing import List, Tuple, Dict, Any

import gradio as gr

# ── local modules ───────────────────────────────────────────────
from models        import AVAILABLE_MODELS, find_model, ModelInfo
from inference     import chat_completion
from tavily_search import enhance_query_with_search
from utils         import (
    extract_text_from_file,
    extract_website_content,
    history_to_messages,
    history_to_chatbot_messages,
    apply_search_replace_changes,
    remove_code_block,
    parse_transformers_js_output,
    format_transformers_js_output,
)
from deploy        import send_to_sandbox

# ── constants ───────────────────────────────────────────────────
SUPPORTED_LANGUAGES = [
    "python", "c", "cpp", "markdown", "latex", "json", "html", "css",
    "javascript", "jinja2", "typescript", "yaml", "dockerfile", "shell",
    "r", "sql", "sql-msSQL", "sql-mySQL", "sql-mariaDB", "sql-sqlite",
    "sql-cassandra", "sql-plSQL", "sql-hive", "sql-pgSQL", "sql-gql",
    "sql-gpSQL", "sql-sparkSQL", "sql-esper",
]
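# Note: these ids (including the sql-* dialects) mirror the language modes
# accepted by Gradio's gr.Code component.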

SYSTEM_PROMPTS = {
    "html": (
        "ONLY USE HTML, CSS AND JAVASCRIPT. "
        "Return exactly one complete HTML page wrapped in ```html ...```."
    ),
    "transformers.js": (
        "Generate THREE fenced blocks: "
        "`index.html`, `index.js`, and `style.css` for a transformers.js web‑app."
    ),
}
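# Languages without a dedicated entry fall back to a generic
# "You are an expert {language} developer." system prompt (see generate_code).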

History = List[Tuple[str, str]]  # (user prompt, assistant reply) pairs kept in gr.State


# ── back‑end callback ───────────────────────────────────────────
def generate_code(
    prompt: str,
    file_path: str | None,
    website_url: str | None,
    model_name: str,
    enable_search: bool,
    language: str,
    hist: History | None,
) -> Tuple[str, History, str, List[Dict[str, str]]]:
    """Runs on **Generate Code** click."""
    hist = hist or []
    prompt = (prompt or "").strip()

    # 1. build message list
    sys_prompt = SYSTEM_PROMPTS.get(language, f"You are an expert {language} developer.")
    msgs       = history_to_messages(hist, sys_prompt)

    ctx: List[str] = [prompt] if prompt else []
    if file_path:
        ctx += ["[File]", extract_text_from_file(file_path)[:5000]]
    if website_url:
        site_html = extract_website_content(website_url)
        if not site_html.startswith("Error"):
            ctx += ["[Website]", site_html[:8000]]

    # Optionally enrich the query with Tavily web-search context (enable_search).
    user_query = enhance_query_with_search("\n\n".join(ctx), enable_search)
    msgs.append({"role": "user", "content": user_query})

    # 2. call model through inference.py
    model: ModelInfo = find_model(model_name) or AVAILABLE_MODELS[0]
    try:
        raw = chat_completion(model.id, msgs)
    except Exception as exc:  # pragma: no cover
        err = f"❌ **Error**\n```{exc}```"
        hist.append((prompt, err))
        return "", hist, "", history_to_chatbot_messages(hist)

    # 3. post-process response
    if language == "transformers.js":
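        # transformers.js responses arrive as three fenced files (index.html,
        # index.js, style.css); preview the index.html in the sandbox.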
        files   = parse_transformers_js_output(raw)
        code    = format_transformers_js_output(files)
        preview = send_to_sandbox(files.get("index.html", ""))
    else:
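        # When a previous (non-error) reply exists, treat the new output as
        # search/replace edits applied on top of it rather than a full rewrite.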
        cleaned = remove_code_block(raw)
        if hist and not hist[-1][1].startswith("❌"):
            cleaned = apply_search_replace_changes(hist[-1][1], cleaned)
        code    = cleaned
        preview = send_to_sandbox(cleaned) if language == "html" else ""

    hist.append((prompt, code))
    chat_display = history_to_chatbot_messages(hist)
    return code, hist, preview, chat_display


# ── Gradio UI ───────────────────────────────────────────────────
THEME = gr.themes.Soft(primary_hue="indigo")
CSS = """
body {font-family:-apple-system,BlinkMacSystemFont,'Segoe UI',Roboto,Arial,sans-serif}
#title {text-align:center;font-size:2.3rem;margin-top:1rem}
#subtitle {text-align:center;color:#55606d;margin-bottom:2rem}
"""

with gr.Blocks(theme=THEME, css=CSS, title="AnyCoder AI") as demo:
    state_hist: gr.State[History] = gr.State([])  # per-session conversation history

    gr.Markdown("## πŸš€Β AnyCoderΒ AI", elem_id="title")
    gr.Markdown("Your AI partner for generating, modifying & understanding code.", elem_id="subtitle")

    with gr.Row():
        # ── sidebar (inputs)
        with gr.Column(scale=1):
            gr.Markdown("#### 1Β Β·Β Model")
            model_dd = gr.Dropdown(
                choices=[m.name for m in AVAILABLE_MODELS],
                value=AVAILABLE_MODELS[0].name,
                label="AI Model",
            )

            gr.Markdown("#### 2Β Β·Β Context")
            with gr.Tabs():
                with gr.Tab("Prompt"):
                    prompt_box = gr.Textbox(lines=6, placeholder="Describe what you want to build…")
                with gr.Tab("File"):
                    file_box   = gr.File(type="filepath")
                with gr.Tab("Website"):
                    url_box    = gr.Textbox(placeholder="https://example.com")

            gr.Markdown("#### 3Β Β·Β Output")
            lang_dd   = gr.Dropdown(SUPPORTED_LANGUAGES, value="html", label="TargetΒ Language")
            search_ck = gr.Checkbox(label="Enable Tavily WebΒ Search")

            with gr.Row():
                clear_btn = gr.Button("Clear Session", variant="secondary")
                gen_btn   = gr.Button("Generate Code", variant="primary")

        # ── main panel (outputs)
        with gr.Column(scale=2):
            with gr.Tabs():
                with gr.Tab("Code"):
                    code_out    = gr.Code(interactive=True, label=None)
                with gr.Tab("Live Preview"):
                    preview_out = gr.HTML()
                with gr.Tab("History"):
                    chat_out    = gr.Chatbot(type="messages")

    # wiring
    gen_btn.click(
        generate_code,
        inputs=[prompt_box, file_box, url_box, model_dd, search_ck, lang_dd, state_hist],
        outputs=[code_out, state_hist, preview_out, chat_out],
    )

    # Reset every input and output; the tuple must line up 1:1 with `outputs`.
    clear_btn.click(
        lambda: ("", None, "", "html", False, [], "", "", []),
        outputs=[prompt_box, file_box, url_box, lang_dd, search_ck,
                 state_hist, code_out, preview_out, chat_out],
        queue=False,
    )

if __name__ == "__main__":
    demo.queue().launch()