""" Core business logic for the code generation application. """ |

from typing import Dict, List, Optional, Tuple, Generator, Any

from config import (HTML_SYSTEM_PROMPT, GENERIC_SYSTEM_PROMPT, HTML_SYSTEM_PROMPT_WITH_SEARCH,
                    GENERIC_SYSTEM_PROMPT_WITH_SEARCH, FollowUpSystemPrompt)
from services import llm_service, search_service
from extractor import extract_text_from_file, extract_website_content
from utils import (history_to_messages, remove_code_block, process_image_for_model,
                   apply_search_replace_changes)

# Conversation history: a list of (user_message, assistant_reply) pairs.
History = List[Tuple[Optional[str], Optional[str]]]


def _determine_system_prompt(language: str, enable_search: bool, history: History) -> Tuple[str, bool]:
    """Pick the system prompt and report whether this is a follow-up edit.

    A request counts as a follow-up when the most recent assistant reply
    contains an HTML document; follow-ups are answered with search/replace
    edits applied to that reply instead of a full regeneration.
    """
    is_follow_up = bool(history and history[-1][1] and "<html" in history[-1][1])
    if is_follow_up:
        return FollowUpSystemPrompt, True

    if language == "html":
        return (HTML_SYSTEM_PROMPT_WITH_SEARCH if enable_search else HTML_SYSTEM_PROMPT), False

    base = GENERIC_SYSTEM_PROMPT_WITH_SEARCH if enable_search else GENERIC_SYSTEM_PROMPT
    return base.format(language=language), False
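
# Illustrative example (values assumed, not taken from the real config):
# given history = [("make a landing page", "<html>...</html>")], calling
# _determine_system_prompt("html", False, history) returns
# (FollowUpSystemPrompt, True), routing the request into edit mode.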


def _prepare_user_content(query: str, image_data: Optional[Any], file_path: Optional[str],
                          website_url: Optional[str], enable_search: bool) -> Any:
    """Assemble the user message, folding in file, website, and search context."""
    context_parts = [query]
    if file_path:
        # Cap extracted file text at 8,000 characters to bound prompt size.
        context_parts.append(f"\n\n--- Reference File ---\n{extract_text_from_file(file_path)[:8000]}")
    if website_url:
        context_parts.append(f"\n\n--- Website to Redesign ---\n{extract_website_content(website_url)}")
    full_query = "".join(context_parts)
    if enable_search and search_service.is_available():
        full_query += f"\n\n--- Web Search Results ---\n{search_service.search(full_query)}"

    if image_data is not None:
        # Multimodal request: pair the text with the processed image payload.
        return [{"type": "text", "text": full_query},
                {"type": "image_url", "image_url": {"url": process_image_for_model(image_data)}}]
    return full_query
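
# Shape note (illustrative): when image_data is provided, the returned
# content is the OpenAI-style multimodal list assumed by llm_service, e.g.
#   [{"type": "text", "text": "..."},
#    {"type": "image_url", "image_url": {"url": "data:image/png;base64,..."}}]
# Otherwise a plain string is returned.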


def generate_code(query: str, image_data: Optional[Any], file_path: Optional[str], website_url: Optional[str],
                  history: History, model_config: Dict[str, str], enable_search: bool,
                  language: str) -> Generator[Dict[str, Any], None, None]:
    """Stream generated code for a query.

    Yields {"code_output": ...} as chunks arrive; the final yield also
    carries the updated history.
    """
    system_prompt, is_follow_up = _determine_system_prompt(language, enable_search, history)
    messages = history_to_messages(history, system_prompt)
    user_content = _prepare_user_content(query, image_data, file_path, website_url, enable_search)
    messages.append({'role': 'user', 'content': user_content})

    accumulated = ""
    for chunk in llm_service.generate_code_stream(model_config['id'], messages):
        accumulated += chunk
        # Follow-ups stream search/replace edits against the previous reply;
        # fresh generations are stripped of their surrounding code fence.
        processed_code = (apply_search_replace_changes(history[-1][1], accumulated)
                          if is_follow_up else remove_code_block(accumulated))
        yield {"code_output": processed_code}

    final_code = (apply_search_replace_changes(history[-1][1], accumulated)
                  if is_follow_up else remove_code_block(accumulated))
    history.append((query, final_code))
    yield {"code_output": final_code, "history": history}