Update core.py
core.py
CHANGED
@@ -1,4 +1,4 @@
-# /core.py
+# /core.py (Corrected)
 
 """
 Core business logic for the code generation application.
@@ -12,13 +12,15 @@ from typing import Dict, List, Optional, Tuple, Generator, Any
 from config import (
     HTML_SYSTEM_PROMPT, GENERIC_SYSTEM_PROMPT,
     HTML_SYSTEM_PROMPT_WITH_SEARCH, GENERIC_SYSTEM_PROMPT_WITH_SEARCH,
-    FOLLOW_UP_SYSTEM_PROMPT
+    FOLLOW_UP_SYSTEM_PROMPT
 )
 from services import llm_service, search_service
 from extractor import extract_text_from_file, extract_website_content
 from utils import (
-    history_to_messages,
+    history_to_messages,
+    remove_code_block,
+    process_image_for_model,
+    apply_search_replace_changes  # <--- FIX: Corrected the function name here
 )
 
 # --- Type Definitions ---
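The substantive change in this hunk is the utils import: the module now pulls in the helper under its actual exported name, apply_search_replace_changes. utils.py itself is not part of this commit, so the following is only a sketch of what that helper plausibly does, assuming the model emits change blocks delimited by <<<<<<< SEARCH, =======, and >>>>>>> REPLACE markers; the marker format and the function body are assumptions, not code from the repository.

# Sketch only: utils.py is not shown in this commit. This assumes the model
# emits blocks of the form
#   <<<<<<< SEARCH / original snippet / ======= / replacement snippet / >>>>>>> REPLACE
import re

_BLOCK_RE = re.compile(
    r"<<<<<<< SEARCH\n(.*?)\n=======\n(.*?)\n>>>>>>> REPLACE",
    re.DOTALL,
)

def apply_search_replace_changes(original_html: str, changes_text: str) -> str:
    """Apply each SEARCH/REPLACE block found in changes_text to original_html."""
    result = original_html
    for search_block, replace_block in _BLOCK_RE.findall(changes_text):
        if search_block and search_block in result:
            # Replace only the first occurrence so repeated snippets are not clobbered.
            result = result.replace(search_block, replace_block, 1)
    return result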
@@ -90,18 +92,13 @@ def generate_code(
 
     for chunk in stream:
         content_stream += chunk
+        processed_code = ""
+
         if is_follow_up:
-            # For follow-ups, we apply changes incrementally
-            # This logic assumes the model sends complete change blocks.
             last_html = history[-1][1] if history and history[-1][1] else ""
-                processed_code = modified_html
-            else:
-                # If not a full block, show the raw diff for user to see progress
-                processed_code = f"Applying Changes:\n\n{content_stream}"
+            # <--- FIX: Use the corrected function name here
+            modified_html = apply_search_replace_changes(last_html, content_stream)
+            processed_code = modified_html
         else:
             processed_code = remove_code_block(content_stream)
 
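On the non-follow-up branch of this streaming loop the preview comes from remove_code_block, which is also imported from utils and not shown here. A minimal sketch, assuming the model wraps its answer in a fenced Markdown block and the app wants the bare code for the preview; the fence handling is an assumption.

# Sketch only: remove_code_block comes from utils, which is not part of this
# commit. Assumes the model returns code inside a fenced Markdown block.
import re

_FENCE_RE = re.compile(r"```[\w+-]*\n(.*?)```", re.DOTALL)

def remove_code_block(text: str) -> str:
    """Return the body of the first fenced code block, or the text unchanged."""
    match = _FENCE_RE.search(text)
    return match.group(1).strip() if match else text.strip()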
@@ -111,12 +108,12 @@ def generate_code(
     final_content = content_stream
     if is_follow_up:
         last_html = history[-1][1] if history and history[-1][1] else ""
+        # <--- FIX: And use the corrected function name here as well
+        final_code = apply_search_replace_changes(last_html, final_content)
     else:
         final_code = remove_code_block(final_content)
 
-    # For user content, use the original query, not the enhanced one, for cleaner history display
+    # Use the original query for history display, not the enhanced one.
     history.append((query, final_code))
 
     yield {"code_output": final_code, "history": history}
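To make the corrected follow-up path concrete, here is how the final block behaves when paired with the apply_search_replace_changes sketch above. The sample HTML, the change-block format, and the history tuples are illustrative assumptions only, not data from the application.

# Illustrative only, reusing the apply_search_replace_changes sketch above.
last_html = "<h1>Hello</h1>\n<p>Old text</p>"
model_response = (
    "<<<<<<< SEARCH\n"
    "<p>Old text</p>\n"
    "=======\n"
    "<p>New text</p>\n"
    ">>>>>>> REPLACE"
)

final_code = apply_search_replace_changes(last_html, model_response)
history = [("build a hello page", last_html)]
history.append(("change the paragraph to say New text", final_code))

print(final_code)  # -> <h1>Hello</h1>\n<p>New text</p>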