|
""" |
|
app.py |
|
|
|
Main application file for AnyCoder, a Gradio-based AI code generation tool. |
|
|
|
This application provides a user interface for generating code in various languages |
|
using different AI models. It supports inputs from text prompts, files, images, |
|
and websites, and includes features like web search enhancement and live code previews. |
|
|
|
Structure: |
|
- Imports & Configuration: Loads necessary libraries and constants. |
|
- Helper Functions: Small utility functions supporting the UI logic. |
|
- Core Application Logic: The main `generation_code` function that handles the AI interaction. |
|
- UI Layout: Defines the Gradio interface using `gr.Blocks`. |
|
- Event Wiring: Connects UI components to backend functions. |
|
- Application Entry Point: Launches the Gradio app. |
|
""" |
|
|
|
import gradio as gr |
|
from typing import Optional, Dict, List, Tuple, Any |
|
|
|
|
|
|
|
|
|
from constants import SYSTEM_PROMPTS, AVAILABLE_MODELS, DEMO_LIST |
|
from hf_client import get_inference_client |
|
from tavily_search import enhance_query_with_search |
|
from utils import ( |
|
extract_text_from_file, |
|
extract_website_content, |
|
apply_search_replace_changes, |
|
history_to_messages, |
|
history_to_chatbot_messages, |
|
remove_code_block, |
|
parse_transformers_js_output, |
|
format_transformers_js_output |
|
) |
|
from deploy import send_to_sandbox, load_project_from_url |
|
|
|
|
|
# Type aliases used throughout this module for readability.
# History: ordered (user_prompt, model_response) pairs for one session.
History = List[Tuple[str, str]]

# Model: one model-config dict (see AVAILABLE_MODELS); code below reads
# its "name" and "id" keys.
Model = Dict[str, Any]

# Fallback system prompt used by generation_code when the selected
# language has no entry in SYSTEM_PROMPTS.
DEFAULT_SYSTEM_PROMPT = """

You are a helpful AI coding assistant. Your primary goal is to generate clean, correct, and efficient code based on the user's request.

- Follow the user's requirements precisely.

- If the user asks for a specific language, provide the code in that language.

- Enclose the final code in a single markdown code block (e.g., ```html ... ```).

- Do not include any conversational text, apologies, or explanations outside of the code block in your final response.

"""
|
|
|
|
|
|
|
|
|
|
|
def get_model_details(model_name: str) -> Optional[Model]:
    """Look up the full configuration dict for a model by its display name.

    Returns None when no entry in AVAILABLE_MODELS has a matching "name".
    """
    return next(
        (entry for entry in AVAILABLE_MODELS if entry["name"] == model_name),
        None,
    )
|
|
|
|
|
|
|
|
|
|
|
def _detect_provider(model_id: str) -> str:
    """Return the inference provider implied by a model id's prefix.

    Falls back to "huggingface" for ids with no recognized prefix.
    """
    for prefix, provider in (
        ("openai/", "openai"),
        ("gemini/", "gemini"),
        ("fireworks-ai/", "fireworks-ai"),
    ):
        if model_id.startswith(prefix):
            return provider
    return "huggingface"


def _build_context_query(query: str, file: Optional[str], website_url: Optional[str]) -> str:
    """Append truncated file and website context to the user's query.

    File text is capped at 5000 chars and website text at 8000 chars to
    keep the prompt within model context limits.
    """
    context_query = query
    if file:
        text = extract_text_from_file(file)
        context_query += f"\n\n[Attached File Content]\n{text[:5000]}"
    if website_url:
        text = extract_website_content(website_url)
        # extract_website_content signals failure via a string starting
        # with 'Error' — skip the context in that case.
        if not text.startswith('Error'):
            context_query += f"\n\n[Scraped Website Content]\n{text[:8000]}"
    return context_query


def generation_code(
    query: Optional[str],
    file: Optional[str],
    website_url: Optional[str],
    current_model: Model,
    enable_search: bool,
    language: str,
    history: Optional[History],
    hf_token: str = "",
) -> Tuple[str, History, str, List[Dict[str, str]]]:
    """Handle a single code-generation request end to end.

    Builds chat messages from the session history plus optional file and
    website context, calls the selected model, and post-processes the
    response for the UI.

    Args:
        query: The user's prompt (None is treated as "").
        file: Filepath of an optional attached file.
        website_url: Optional URL whose scraped text is added as context.
        current_model: Model config dict; must contain an "id" key.
        enable_search: Whether to enhance the query via web search.
        language: Target output language; selects the system prompt and
            the post-processing branch.
        history: Prior (prompt, response) pairs, or None for a new session.
        hf_token: Optional per-user Hugging Face token. Defaults to "" so
            event wiring that supplies no token component still works
            (the original signature had no default, which made the
            7-input click handler raise TypeError).

    Returns:
        A tuple of (code string, updated history, preview HTML, chatbot
        messages). On failure the code string and preview are empty and
        the error text is appended to the history instead.
    """
    query = query or ""
    history = history or []

    try:
        system_prompt = SYSTEM_PROMPTS.get(language, DEFAULT_SYSTEM_PROMPT)
        model_id = current_model["id"]
        provider = _detect_provider(model_id)

        messages = history_to_messages(history, system_prompt)
        final_query = enhance_query_with_search(
            _build_context_query(query, file, website_url), enable_search
        )
        messages.append({'role': 'user', 'content': final_query})

        client = get_inference_client(model_id, provider, user_token=hf_token)
        resp = client.chat.completions.create(
            model=model_id,
            messages=messages,
            max_tokens=16384,
            temperature=0.1,
        )
        content = resp.choices[0].message.content
    except Exception as e:
        # Top-level boundary: surface any failure in the chat history
        # instead of crashing the UI.
        # NOTE(review): the "β" prefix looks like a mojibake'd error marker;
        # it is also matched below to skip search/replace on error turns —
        # keep the two in sync if it is ever corrected.
        error_message = f"β **An error occurred:**\n\n```\n{str(e)}\n```\n\nPlease check your API keys, model selection, or try again."
        history.append((query, error_message))
        return "", history, "", history_to_chatbot_messages(history)

    if language == 'transformers.js':
        # transformers.js responses contain multiple files; only the
        # generated index.html is previewed in the sandbox.
        files = parse_transformers_js_output(content)
        code_str = format_transformers_js_output(files)
        preview_html = send_to_sandbox(files.get('index.html', ''))
    else:
        clean_code = remove_code_block(content)
        # Follow-up turns: treat the model output as search/replace edits
        # against the previous result, unless that result was an error.
        if history and history[-1][1] and not history[-1][1].startswith("β"):
            code_str = apply_search_replace_changes(history[-1][1], clean_code)
        else:
            code_str = clean_code
        preview_html = send_to_sandbox(code_str) if language == 'html' else ''

    updated_history = history + [(query, code_str)]
    chat_messages = history_to_chatbot_messages(updated_history)
    return code_str, updated_history, preview_html, chat_messages
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Page-level CSS overrides injected via gr.Blocks(css=...) below.
CUSTOM_CSS = """

body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif; }

#main_title { text-align: center; font-size: 2.5rem; font-weight: 700; color: #1a202c; margin: 1.5rem 0 0.5rem 0; }

#subtitle { text-align: center; color: #4a5568; margin-bottom: 2.5rem; font-size: 1.1rem; }

.gradio-container { background-color: #f7fafc; }

/* Custom styling for the generate button to make it stand out */

#gen_btn { box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1), 0 2px 4px -1px rgba(0, 0, 0, 0.06); }

"""
|
|
|
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue", secondary_hue="sky"), title="AnyCoder - AI Code Generator", css=CUSTOM_CSS) as demo:

    # --- Session state ---
    history_state = gr.State([])  # conversation history: List[Tuple[str, str]]
    initial_model = AVAILABLE_MODELS[0]
    model_state = gr.State(initial_model)  # currently selected model dict
    # Per-user Hugging Face token placeholder. generation_code takes the
    # token as its final parameter, so the click wiring must supply an
    # eighth input; without it the handler is called with only 7 of 8
    # arguments and raises TypeError. "" means "no user token".
    hf_token_state = gr.State("")

    # --- Header ---
    gr.Markdown("# π Shasha AI", elem_id="main_title")
    gr.Markdown("Your personal AI partner for generating, modifying, and understanding code.", elem_id="subtitle")

    with gr.Row(equal_height=False):

        # --- Left column: model selection, context inputs, configuration ---
        with gr.Column(scale=1):
            gr.Markdown("### 1. Select Model")
            model_choices = [model["name"] for model in AVAILABLE_MODELS]
            model_dd = gr.Dropdown(
                choices=model_choices,
                value=initial_model["name"],
                label="AI Model",
                info="Different models have different strengths."
            )

            gr.Markdown("### 2. Provide Context")
            with gr.Tabs():
                with gr.Tab("π Prompt"):
                    prompt_in = gr.Textbox(
                        label="Your Request",
                        lines=7,
                        placeholder="e.g., 'Create a modern, responsive landing page for a SaaS product.'",
                        show_label=False
                    )
                with gr.Tab("π File"):
                    file_in = gr.File(label="Attach File (Optional)", type="filepath")
                with gr.Tab("π Website"):
                    url_site = gr.Textbox(label="Scrape Website (Optional)", placeholder="https://example.com")

            gr.Markdown("### 3. Configure Output")
            language_dd = gr.Dropdown(
                choices=["html", "python", "transformers.js", "sql", "javascript", "css"],
                value="html",
                label="Target Language"
            )
            search_chk = gr.Checkbox(label="Enable Web Search", info="Enhances AI with real-time info.")

            with gr.Row():
                clr_btn = gr.Button("Clear Session", variant="secondary")
                gen_btn = gr.Button("Generate Code", variant="primary", elem_id="gen_btn")

        # --- Right column: generated code, live preview, chat history ---
        with gr.Column(scale=2):
            with gr.Tabs() as main_tabs:
                with gr.Tab("π» Code", id="code_tab"):
                    code_out = gr.Code(label="Generated Code", language="html", interactive=True)
                with gr.Tab("ποΈ Live Preview", id="preview_tab"):
                    preview_out = gr.HTML(label="Live Preview")
                with gr.Tab("π History", id="history_tab"):
                    chat_out = gr.Chatbot(label="Conversation History", type="messages")

    # --- Event wiring ---

    def on_model_change(model_name: str) -> Dict:
        """Updates the model_state when the user selects a new model."""
        model_details = get_model_details(model_name)
        return model_details or initial_model

    model_dd.change(fn=on_model_change, inputs=[model_dd], outputs=[model_state])
    # Keep the code editor's syntax highlighting in sync with the target language.
    language_dd.change(fn=lambda lang: gr.update(language=lang), inputs=[language_dd], outputs=[code_out])

    gen_btn.click(
        fn=generation_code,
        inputs=[
            prompt_in, file_in, url_site,
            model_state, search_chk, language_dd, history_state,
            hf_token_state,  # fix: eighth input matching the hf_token parameter
        ],
        outputs=[code_out, history_state, preview_out, chat_out]
    )

    def clear_session():
        """Resets all UI components and state to their initial values."""
        return (
            "",    # prompt_in
            None,  # file_in
            "",    # url_site
            [],    # history_state
            "",    # code_out
            "",    # preview_out
            []     # chat_out
        )

    clr_btn.click(
        fn=clear_session,
        outputs=[prompt_in, file_in, url_site, history_state, code_out, preview_out, chat_out],
        queue=False
    )
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':

    # Enable Gradio's request queue (so long-running generations don't
    # block other users) and start the web server.
    demo.queue().launch()