File size: 7,016 Bytes
588ca16 48f06a6 c928d36 e0b040a c928d36 4f8a74b 13a7675 c928d36 dad8300 c928d36 dad8300 c928d36 588ca16 c928d36 588ca16 c928d36 2b7139c c928d36 588ca16 c928d36 49d4630 c928d36 dad8300 c928d36 2b7139c c928d36 e0b040a c928d36 e0b040a 588ca16 e0b040a 588ca16 c928d36 588ca16 e0b040a dad8300 c928d36 e0b040a c928d36 588ca16 e0b040a c928d36 e0b040a c928d36 588ca16 c928d36 588ca16 e0b040a c928d36 e0b040a c928d36 1bd1ac4 c928d36 e0b040a c928d36 588ca16 c928d36 588ca16 dad8300 c928d36 e0b040a c928d36 e0b040a c928d36 e0b040a c928d36 e0b040a c928d36 1bd1ac4 49d4630 c928d36 f7cf3be 2deb7a7 588ca16 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135 136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167 168 169 170 171 172 173 174 175 176 177 178 179 180 181 182 183 |
# app.py
"""
ShashaCode Builder – AI code-generation playground.
• Hugging Face Spaces + Gradio front-end
• Supports prompts, file upload, website scraping, optional web search
• Streams code back, shows live HTML preview, can deploy to a user Space
"""
# βββββββββββββββββββββββββββββββββββββββββ Imports
import gradio as gr
from pathlib import Path
from typing import Dict, List, Optional, Tuple, Any
from constants import ( # β all constants live here
HTML_SYSTEM_PROMPT,
TRANSFORMERS_JS_SYSTEM_PROMPT,
SYSTEM_PROMPTS,
AVAILABLE_MODELS,
DEMO_LIST,
GRADIO_SUPPORTED_LANGUAGES, # β new import
SEARCH_START, DIVIDER, REPLACE_END,
)
from hf_client import get_inference_client
from tavily_search import enhance_query_with_search
from utils import ( # helpers split into utils.py
history_to_messages,
history_to_chatbot_messages,
remove_code_block,
parse_transformers_js_output,
format_transformers_js_output,
parse_svelte_output,
format_svelte_output,
apply_search_replace_changes,
apply_transformers_js_search_replace_changes,
extract_text_from_file,
extract_website_content,
get_gradio_language,
)
from deploy import send_to_sandbox
# βββββββββββββββββββββββββββββββββββββββββ Type Aliases
# One (user_query, generated_code) tuple per conversation turn.
History = List[Tuple[str, str]]
# Loose model descriptor; this file reads model["id"] (inference backend id)
# and model["name"] (dropdown display label).
ModelInfo = Dict[str, Any]
# βββββββββββββββββββββββββββββββββββββββββ Core Function
def generate_code(
    query: str,
    file_path: Optional[str],
    website_url: Optional[str],
    model: ModelInfo,
    enable_search: bool,
    language: str,
    history: Optional[History],
) -> Tuple[str, History, str, List[Dict[str, str]]]:
    """Main inference pipeline: build prompt -> call model -> post-process.

    Args:
        query: Free-text description of what to build (may be empty).
        file_path: Optional reference file; its extracted text (first
            5,000 chars) is appended to the prompt.
        website_url: Optional URL to redesign; its scraped HTML (first
            8,000 chars) is appended unless scraping failed.
        model: Model descriptor dict; ``model["id"]`` selects the backend.
        enable_search: When True, the query is enriched via web search.
        language: Target output language; picks the system prompt and
            whether an HTML preview is rendered.
        history: Prior (query, code) turns; a new turn is appended in place.

    Returns:
        Tuple of (code, history, preview_html, chatbot_messages).
    """
    query = query or ""
    history = history or []

    # 1. pick system prompt for the requested output language
    if language == "html":
        system = HTML_SYSTEM_PROMPT
    elif language == "transformers.js":
        system = TRANSFORMERS_JS_SYSTEM_PROMPT
    else:
        system = SYSTEM_PROMPTS.get(language, HTML_SYSTEM_PROMPT)

    # 2. build message list
    messages = history_to_messages(history, system)
    ctx_parts: List[str] = []
    if query.strip():
        # Fix: previously an empty query still occupied the first slot,
        # producing a prompt that began with a blank "\n\n" section.
        ctx_parts.append(query.strip())
    if file_path:
        ctx_parts += ["[File]", extract_text_from_file(file_path)[:5000]]
    if website_url:
        html = extract_website_content(website_url)
        # extract_website_content signals failure via an "Error..." string.
        if not html.startswith("Error"):
            ctx_parts += ["[Website]", html[:8000]]
    user_query = "\n\n".join(ctx_parts)
    user_query = enhance_query_with_search(user_query, enable_search)
    messages.append({"role": "user", "content": user_query})

    # 3. call model
    client = get_inference_client(model["id"])
    resp = client.chat.completions.create(
        model=model["id"],
        messages=messages,
        max_tokens=16000,
        temperature=0.15,
    )
    answer = resp.choices[0].message.content

    # 4. post-process
    if language == "transformers.js":
        files = parse_transformers_js_output(answer)
        code = format_transformers_js_output(files)
        preview = send_to_sandbox(files.get("index.html", ""))
    else:
        clean = remove_code_block(answer)
        # Follow-up turns are treated as search/replace diffs against the
        # previous turn's code, unless that code starts with the marker below.
        # NOTE(review): "β" looks like a mojibake'd marker glyph (an error
        # prefix?) — confirm against the original source before touching it.
        if history and not history[-1][1].startswith("β"):
            clean = apply_search_replace_changes(history[-1][1], clean)
        code = clean
        preview = send_to_sandbox(code) if language == "html" else ""

    history.append((query, code))
    chat_msgs = history_to_chatbot_messages(history)
    return code, history, preview, chat_msgs
# ───────────────────────────────────────── UI
LOGO_PATH = "assets/logo.png"  # ensure this file exists
CUSTOM_CSS = """
body {font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif;}
#logo {max-height:64px;margin:auto;}
"""
# NOTE(review): user-facing labels below had mojibake'd spaces
# ("Generateβ―Code", "AIΒ Model", ...); repaired to plain ASCII spaces.
with gr.Blocks(css=CUSTOM_CSS, title="ShashaCode Builder") as demo:
    # Per-session state: chat history and the selected model descriptor.
    state_history = gr.State([])
    state_model = gr.State(AVAILABLE_MODELS[0])

    # Header
    with gr.Row():
        gr.Image(LOGO_PATH, elem_id="logo", show_label=False, height=64)
        gr.Markdown("## **AnyCoder AI**\nYour AI partner for generating, modifying & understanding code.")

    # Sidebar (inputs)
    with gr.Row():
        with gr.Column(scale=1, min_width=300):
            # Model picker; display names are resolved back to model dicts
            # by _model_from_name below.
            dd_model = gr.Dropdown(
                label="AI Model",
                choices=[m["name"] for m in AVAILABLE_MODELS],
                value=AVAILABLE_MODELS[0]["name"],
            )
            # Prompt / File / Website tabs
            with gr.Tabs():
                with gr.Tab("Prompt"):
                    tb_prompt = gr.Textbox(label="Describe what you'd like to build…", lines=6)
                with gr.Tab("File"):
                    inp_file = gr.File(label="Reference file", type="filepath")
                with gr.Tab("Website"):
                    tb_url = gr.Textbox(label="URL to redesign")
            # Output config
            dd_lang = gr.Dropdown(
                label="Target language",
                choices=[l for l in GRADIO_SUPPORTED_LANGUAGES if l],  # drop falsy entries
                value="html",
            )
            chk_search = gr.Checkbox(label="Enable Tavily Web Search")
            # Buttons
            btn_generate = gr.Button("Generate Code", variant="primary")
            btn_clear = gr.Button("Clear Session", variant="secondary")

        # Main panel (outputs)
        with gr.Column(scale=2):
            with gr.Tabs():
                with gr.Tab("Code"):
                    out_code = gr.Code(language="html", show_label=False)
                with gr.Tab("Preview"):
                    out_prev = gr.HTML()
                with gr.Tab("History"):
                    out_hist = gr.Chatbot(type="messages")

    # ─── Callbacks ─────────────────────────────────────────────
    def _model_from_name(name):
        """Resolve a dropdown display name to its model dict (default: first model)."""
        return next((m for m in AVAILABLE_MODELS if m["name"] == name), AVAILABLE_MODELS[0])

    dd_model.change(_model_from_name, inputs=dd_model, outputs=state_model)
    btn_generate.click(
        fn=generate_code,
        inputs=[tb_prompt, inp_file, tb_url, state_model, chk_search, dd_lang, state_history],
        outputs=[out_code, state_history, out_prev, out_hist],
    )
    # Reset every input, the stored history, and all three output panes.
    # (Bug fix: the preview pane previously received [] — not a valid value
    # for gr.HTML — and the chatbot pane was never cleared.)
    btn_clear.click(
        lambda: ("", None, "", [], "", "", []),
        outputs=[tb_prompt, inp_file, tb_url, state_history, out_code, out_prev, out_hist],
    )

if __name__ == "__main__":
    demo.queue().launch()
|