Update app.py
app.py CHANGED
@@ -15,7 +15,6 @@ from typing import Optional, Dict, List, Tuple, Any
 from constants import (
     HTML_SYSTEM_PROMPT,
     TRANSFORMERS_JS_SYSTEM_PROMPT,
-    GENERIC_SYSTEM_PROMPT,
     AVAILABLE_MODELS,
     DEMO_LIST,
 )
@@ -37,7 +36,7 @@ from deploy import send_to_sandbox
 History = List[Tuple[str, str]]
 Model = Dict[str, Any]
 
-# --- Supported languages dropdown
+# --- Supported languages for dropdown ---
 SUPPORTED_LANGUAGES = [
     "python", "c", "cpp", "markdown", "latex", "json", "html", "css",
     "javascript", "jinja2", "typescript", "yaml", "dockerfile", "shell",
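SUPPORTED_LANGUAGES appears to back a language selector in the UI (the list mirrors the grammars gr.Code can highlight). A minimal sketch of such a dropdown; the component name and default value are assumptions, not code from this commit:

```python
import gradio as gr

SUPPORTED_LANGUAGES = ["python", "html", "css", "javascript", "markdown"]  # abbreviated copy of the list above

# Hypothetical wiring; the actual component name and default in app.py may differ.
language_dd = gr.Dropdown(
    choices=SUPPORTED_LANGUAGES,
    value="html",   # assumed default, matching the HTML-first code tab further down
    label="Language",
)
```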
@@ -64,17 +63,21 @@ def generation_code(
     query = query or ""
     history = history or []
     try:
-        #
+        # Choose system prompt based on language
         if language == "html":
            system_prompt = HTML_SYSTEM_PROMPT
         elif language == "transformers.js":
             system_prompt = TRANSFORMERS_JS_SYSTEM_PROMPT
         else:
-            system_prompt = GENERIC_SYSTEM_PROMPT
+            # Generic fallback prompt
+            system_prompt = (
+                f"You are an expert {language} developer. "
+                f"Write clean, idiomatic {language} code based on the user's request."
+            )
 
         model_id = current_model["id"]
-        #
-        if model_id.startswith("openai/") or model_id in {"gpt-4","gpt-3.5-turbo"}:
+        # Determine provider
+        if model_id.startswith("openai/") or model_id in {"gpt-4", "gpt-3.5-turbo"}:
             provider = "openai"
         elif model_id.startswith("gemini/") or model_id.startswith("google/"):
             provider = "gemini"
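The provider selection added here is a plain prefix check on the model id. Read in isolation it amounts to the helper below; this is an illustrative sketch, and the branches between "gemini" and the "auto" fallback (unchanged lines not shown in this hunk) are omitted:

```python
def detect_provider(model_id: str) -> str:
    # Sketch of the routing logic in the diff; other providers handled in
    # unchanged lines of generation_code are not reproduced here.
    if model_id.startswith("openai/") or model_id in {"gpt-4", "gpt-3.5-turbo"}:
        return "openai"
    if model_id.startswith("gemini/") or model_id.startswith("google/"):
        return "gemini"
    return "auto"  # let get_inference_client pick a default provider
```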
@@ -83,20 +86,20 @@ def generation_code(
         else:
             provider = "auto"
 
-        #
+        # Build message history
         msgs = history_to_messages(history, system_prompt)
-
+        context = query
         if file:
             ftext = extract_text_from_file(file)
-
+            context += f"\n\n[Attached file]\n{ftext[:5000]}"
         if website_url:
             wtext = extract_website_content(website_url)
             if not wtext.startswith("Error"):
-
-        final_q = enhance_query_with_search(
+                context += f"\n\n[Website content]\n{wtext[:8000]}"
+        final_q = enhance_query_with_search(context, enable_search)
         msgs.append({"role": "user", "content": final_q})
 
-        #
+        # Call the model
         client = get_inference_client(model_id, provider)
         resp = client.chat.completions.create(
             model=model_id,
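The new context assembly truncates attachments before they reach enhance_query_with_search: file text is capped at 5,000 characters and website text at 8,000. A self-contained sketch of that flow, assuming the extraction helpers return plain strings (and an error-prefixed string on failure, as the "Error" check suggests):

```python
def build_context(query: str, file_text: str = "", website_text: str = "") -> str:
    # Illustrative sketch of the context building added in this commit.
    context = query
    if file_text:
        context += f"\n\n[Attached file]\n{file_text[:5000]}"       # cap file text at 5,000 chars
    if website_text and not website_text.startswith("Error"):
        context += f"\n\n[Website content]\n{website_text[:8000]}"  # cap website text at 8,000 chars
    return context
```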
@@ -111,7 +114,7 @@ def generation_code(
         history.append((query, err))
         return "", history, "", history_to_chatbot_messages(history)
 
-        #
+        # Process model output
        if language == "transformers.js":
             files = parse_transformers_js_output(content)
             code = format_transformers_js_output(files)
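For context on the error path above: History is List[Tuple[str, str]], i.e. (user, assistant) pairs, and history_to_chatbot_messages converts it into the message dicts Gradio's chatbot expects. The real helper lives elsewhere in the repo and may differ; a plausible shape, stated as an assumption:

```python
from typing import Dict, List, Tuple

History = List[Tuple[str, str]]

def history_to_chatbot_messages(history: History) -> List[Dict[str, str]]:
    # Assumed shape of the helper used in generation_code's return values.
    messages: List[Dict[str, str]] = []
    for user_msg, assistant_msg in history:
        messages.append({"role": "user", "content": user_msg})
        messages.append({"role": "assistant", "content": assistant_msg})
    return messages
```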
@@ -174,7 +177,7 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue"), css=CUSTOM_CSS, title="
         with gr.Column(scale=2):
             with gr.Tabs():
                 with gr.Tab("💻 Code"):
-                    code_out = gr.Code(language=
+                    code_out = gr.Code(language="html", interactive=True)
                 with gr.Tab("👁️ Live Preview"):
                     preview_out = gr.HTML()
                 with gr.Tab("📜 History"):
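The code tab is initialized with language="html", so generations in other languages are highlighted as HTML unless the component is updated per request. If the language dropdown is wired to an event handler, the highlighting can follow the selection; a hedged sketch (component names are hypothetical, and mapping "transformers.js" to "html" is an assumption):

```python
import gradio as gr

def on_language_change(language: str):
    # gr.Code has no "transformers.js" grammar, so fall back to HTML highlighting for it (assumption).
    code_lang = "html" if language == "transformers.js" else language
    return gr.update(language=code_lang)

# Hypothetical wiring; the actual component names in app.py may differ.
# language_dd.change(on_language_change, inputs=[language_dd], outputs=[code_out])
```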