""" |
|
Core business logic for the code generation application. |
|
|
|
This module orchestrates the entire process from receiving a user query to |
|
generating the final code. It interacts with the services, extractors, and |
|
utility modules to fulfill the request. |
|
""" |
|
from typing import Dict, List, Optional, Tuple, Generator, Any

from config import (
    HTML_SYSTEM_PROMPT, GENERIC_SYSTEM_PROMPT,
    HTML_SYSTEM_PROMPT_WITH_SEARCH, GENERIC_SYSTEM_PROMPT_WITH_SEARCH,
    FOLLOW_UP_SYSTEM_PROMPT
)
from services import llm_service, search_service
from extractor import extract_text_from_file, extract_website_content
from utils import (
    history_to_messages,
    remove_code_block,
    process_image_for_model,
    apply_search_replace_changes
)

History = List[Tuple[Optional[str], Optional[str]]]


def _determine_system_prompt(language: str, enable_search: bool, history: History) -> Tuple[str, bool]:
    """Determines the appropriate system prompt based on context."""
    is_follow_up = False
    if history and history[-1][1] and ("<!DOCTYPE html>" in history[-1][1] or "<html" in history[-1][1]):
        is_follow_up = True
        return FOLLOW_UP_SYSTEM_PROMPT, is_follow_up

    if language == "html":
        prompt = HTML_SYSTEM_PROMPT_WITH_SEARCH if enable_search else HTML_SYSTEM_PROMPT
    else:
        base_prompt = GENERIC_SYSTEM_PROMPT_WITH_SEARCH if enable_search else GENERIC_SYSTEM_PROMPT
        prompt = base_prompt.format(language=language)
    return prompt, is_follow_up


def _prepare_user_content(
    query: str, image_data: Optional[Any], file_path: Optional[str],
    website_url: Optional[str], enable_search: bool
) -> Any:
    """Constructs the final user prompt including context from files, the web, and search."""
    context_parts = [query]

    # Extracted text is truncated to 8,000 characters to keep the prompt size bounded.
    if file_path:
        file_text = extract_text_from_file(file_path)
        context_parts.append(f"\n\n--- Reference File Content ---\n{file_text[:8000]}")

    if website_url:
        web_text = extract_website_content(website_url)
        context_parts.append(f"\n\n--- Website Content for Redesign ---\n{web_text[:8000]}")

    full_query = "".join(context_parts)

    if enable_search and search_service.is_available():
        search_results = search_service.search(full_query)
        full_query += f"\n\n--- Web Search Results ---\n{search_results}"

    if image_data is not None:
        # With an image attached, return multimodal content parts; otherwise a plain string.
        return [
            {"type": "text", "text": full_query},
            {"type": "image_url", "image_url": {"url": process_image_for_model(image_data)}}
        ]
    return full_query


def generate_code(
    query: str,
    image_data: Optional[Any],
    file_path: Optional[str],
    website_url: Optional[str],
    history: History,
    model_config: Dict[str, str],
    enable_search: bool,
    language: str
) -> Generator[Dict[str, Any], None, None]:
    """
    Main generator function to handle a user request and stream responses.
    """
    system_prompt, is_follow_up = _determine_system_prompt(language, enable_search, history)
    messages = history_to_messages(history, system_prompt)
    user_content = _prepare_user_content(query, image_data, file_path, website_url, enable_search)
    messages.append({'role': 'user', 'content': user_content})

    content_stream = ""
    stream = llm_service.generate_code_stream(model_config['id'], messages)

    for chunk in stream:
        content_stream += chunk
        processed_code = ""

        if is_follow_up:
            # Follow-up turns are treated as edits: the streamed output is applied as
            # search/replace changes against the previously generated HTML.
            last_html = history[-1][1] if history and history[-1][1] else ""
            modified_html = apply_search_replace_changes(last_html, content_stream)
            processed_code = modified_html
        else:
            processed_code = remove_code_block(content_stream)

        yield {"code_output": processed_code}

    # Process the complete response once streaming has finished.
    final_content = content_stream
    if is_follow_up:
        last_html = history[-1][1] if history and history[-1][1] else ""
        final_code = apply_search_replace_changes(last_html, final_content)
    else:
        final_code = remove_code_block(final_content)

    history.append((query, final_code))

    yield {"code_output": final_code, "history": history}