# app.py
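"""Gradio front-end for AnyCoder, an AI code generator: collects the user's
prompt plus optional file, website, and image context, sends it to the selected
Hugging Face inference model, and renders the generated code with a live preview."""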

from typing import Optional, Dict, List, Tuple
import gradio as gr

from constants import HTML_SYSTEM_PROMPT, AVAILABLE_MODELS, DEMO_LIST
from hf_client import get_inference_client, tavily_client
from tavily_search import enhance_query_with_search
from utils import (
    extract_text_from_file,
    extract_website_content,
    apply_search_replace_changes,
    apply_transformers_js_search_replace_changes,
    history_to_messages,
    history_to_chatbot_messages,
    remove_code_block,
    parse_transformers_js_output,
    format_transformers_js_output
)
from search_replace import SEARCH_START, DIVIDER, REPLACE_END
from web_scraper import extract_text_from_image
from deploy import send_to_sandbox, handle_load_project

# Type aliases
History = List[Tuple[str, str]]

# Core generation function
def generation_code(
    query: Optional[str],
    image: Optional[gr.Image],
    file: Optional[str],
    website_url: Optional[str],
    _setting: Dict[str, str],
    _history: Optional[History],
    _current_model: Dict,
    enable_search: bool,
    language: str,
    provider: str
) -> Tuple[str, History, str, List[Dict[str, str]]]:
    # Initialize inputs
    if query is None:
        query = ''
    if _history is None:
        _history = []

    # Prepare system prompt and history
    system_prompt = _setting.get('system', HTML_SYSTEM_PROMPT)
    messages = history_to_messages(_history, system_prompt)

    # Append file content if provided
    if file:
        file_text = extract_text_from_file(file)
        if file_text:
            query += f"\n\n[Reference file content below]\n{file_text[:5000]}"
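
    # Append text extracted from the design image if provided; a sketch that
    # assumes extract_text_from_image accepts the value emitted by gr.Image
    if image is not None:
        image_text = extract_text_from_image(image)
        if image_text:
            query += f"\n\n[Design image content below]\n{image_text}"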

    # Append website content if provided
    if website_url:
        website_text = extract_website_content(website_url)
        if not website_text.startswith("Error"):
            query += f"\n\n[Website content below]\n{website_text[:8000]}"

    # Enhance with web search if enabled
    final_query = enhance_query_with_search(query, enable_search)
    messages.append({'role': 'user', 'content': final_query})

    # Call HF inference
    client = get_inference_client(_current_model['id'], provider)
    completion = client.chat.completions.create(
        model=_current_model['id'],
        messages=messages,
        max_tokens=10000
    )
    content = completion.choices[0].message.content

    # Process output based on language and existing content
    has_existing = bool(_history and _history[-1][1])
    if language == 'transformers.js':
        files = parse_transformers_js_output(content)
        code_str = format_transformers_js_output(files)
        sandbox_html = send_to_sandbox(files.get('index.html', ''))
    else:
        clean = remove_code_block(content)
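        # Replies to follow-up requests arrive as search/replace blocks rather
        # than a full document, so merge them into the previous output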
        if has_existing and not clean.strip().startswith('<!DOCTYPE'):
            clean = apply_search_replace_changes(_history[-1][1], clean)
        code_str = clean
        sandbox_html = send_to_sandbox(clean) if language == 'html' else ''

    # Update history and prepare chatbot messages
    new_history = _history + [(query, code_str)]
    chat_msgs = history_to_chatbot_messages(new_history)

    # Return exactly four outputs: code, history state, preview HTML, and chat history
    return code_str, new_history, sandbox_html, chat_msgs

# Build Gradio UI
with gr.Blocks(
    theme=gr.themes.Base(),
    title="AnyCoder - AI Code Generator"
) as demo:
    history_state = gr.State([])
    setting_state = gr.State({ 'system': HTML_SYSTEM_PROMPT })
    current_model = gr.State(AVAILABLE_MODELS[9])

    with gr.Sidebar():
        gr.LoginButton()
        load_project_url = gr.Textbox(label="Hugging Face Space URL")
        load_project_btn = gr.Button("Import Project")
        load_project_status = gr.Markdown(visible=False)

        input_box = gr.Textbox(label="What to build?", lines=3)
        language_dropdown = gr.Dropdown(label="Language", choices=["html", "python", "transformers.js"], value="html")
        website_input = gr.Textbox(label="Website URL")
        file_input = gr.File(label="Reference file")
        image_input = gr.Image(label="Design image")
        search_toggle = gr.Checkbox(label="Web search")
        model_dropdown = gr.Dropdown(label="Model", choices=[m['name'] for m in AVAILABLE_MODELS], value=AVAILABLE_MODELS[9]['name'])

        generate_btn = gr.Button("Generate")
        clear_btn = gr.Button("Clear")

    with gr.Column():
        with gr.Tabs():
            with gr.Tab("Code"):
                code_output = gr.Code(label="Generated code")
            with gr.Tab("Preview"):
                preview = gr.HTML(label="Live preview")
            with gr.Tab("History"):
                history_output = gr.Chatbot()

    load_project_btn.click(
        fn=handle_load_project,
        inputs=[load_project_url],
        outputs=[load_project_status, code_output, preview, load_project_url, history_state, history_output]
    )
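
    # Keep current_model in sync with the model dropdown; a minimal sketch that
    # assumes each AVAILABLE_MODELS entry carries the 'name' and 'id' keys used above
    def on_model_change(model_name: str) -> Dict:
        for m in AVAILABLE_MODELS:
            if m['name'] == model_name:
                return m
        return AVAILABLE_MODELS[9]

    model_dropdown.change(
        fn=on_model_change,
        inputs=[model_dropdown],
        outputs=[current_model]
    )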

    generate_btn.click(
        fn=generation_code,
        inputs=[input_box, image_input, file_input, website_input,
                setting_state, history_state, current_model,
                search_toggle, language_dropdown, gr.State('auto')],
        outputs=[code_output, history_state, preview, history_output]
    )

    clear_btn.click(lambda: ([], [], "", ""), outputs=[history_state, history_output, preview, code_output])

if __name__ == "__main__":
    demo.queue().launch()