mgbam committed on
Commit afdc33e · verified · 1 Parent(s): 31f94f1

Update app.py

Files changed (1)
  1. app.py +53 -42
app.py CHANGED
@@ -3,15 +3,22 @@
 """
 Main application file for SHASHA AI, a Gradio-based AI code generation tool.
 
-This application provides a user interface for generating code in various languages
-using different AI models. It supports inputs from text prompts, files, images,
-and websites, and includes features like web search enhancement and live code previews.
+Provides a UI for generating code in many languages using various AI models.
+Supports text prompts, file uploads, website scraping, optional web search,
+and live previews of HTML output.
 """
 
 import gradio as gr
 from typing import Optional, Dict, List, Tuple, Any
 
-from constants import SYSTEM_PROMPTS, AVAILABLE_MODELS, DEMO_LIST
+# --- Local module imports ---
+from constants import (
+    HTML_SYSTEM_PROMPT,
+    TRANSFORMERS_JS_SYSTEM_PROMPT,
+    GENERIC_SYSTEM_PROMPT,
+    AVAILABLE_MODELS,
+    DEMO_LIST,
+)
 from hf_client import get_inference_client
 from tavily_search import enhance_query_with_search
 from utils import (
@@ -22,14 +29,15 @@ from utils import (
     history_to_chatbot_messages,
     remove_code_block,
     parse_transformers_js_output,
-    format_transformers_js_output
+    format_transformers_js_output,
 )
 from deploy import send_to_sandbox
 
+# --- Type aliases ---
 History = List[Tuple[str, str]]
 Model = Dict[str, Any]
 
-# Full list of supported languages for syntax highlighting & generation
+# --- Supported languages dropdown & syntax highlighting ---
 SUPPORTED_LANGUAGES = [
     "python", "c", "cpp", "markdown", "latex", "json", "html", "css",
     "javascript", "jinja2", "typescript", "yaml", "dockerfile", "shell",
@@ -38,13 +46,6 @@ SUPPORTED_LANGUAGES = [
     "sql-gpSQL", "sql-sparkSQL", "sql-esper"
 ]
 
-DEFAULT_SYSTEM_PROMPT = """
-You are a helpful AI coding assistant. Generate clean, correct, and efficient code based on the user's request.
-- Follow requirements precisely.
-- Enclose final code in a single ```code``` block of the target language.
-- Do not include any explanations outside the code block.
-"""
-
 def get_model_details(name: str) -> Optional[Model]:
     for m in AVAILABLE_MODELS:
         if m["name"] == name:
@@ -63,32 +64,39 @@ def generation_code(
     query = query or ""
     history = history or []
     try:
-        system_prompt = SYSTEM_PROMPTS.get(language, DEFAULT_SYSTEM_PROMPT)
-        model_id = current_model["id"]
+        # choose system prompt
+        if language == "html":
+            system_prompt = HTML_SYSTEM_PROMPT
+        elif language == "transformers.js":
+            system_prompt = TRANSFORMERS_JS_SYSTEM_PROMPT
+        else:
+            system_prompt = GENERIC_SYSTEM_PROMPT.format(language=language)
 
+        model_id = current_model["id"]
         # pick provider
-        if model_id.startswith("openai/") or model_id in ("gpt-4", "gpt-3.5-turbo"):
+        if model_id.startswith("openai/") or model_id in {"gpt-4", "gpt-3.5-turbo"}:
             provider = "openai"
-        elif model_id.startswith("gemini/"):
+        elif model_id.startswith("gemini/") or model_id.startswith("google/"):
             provider = "gemini"
         elif model_id.startswith("fireworks-ai/"):
             provider = "fireworks-ai"
         else:
-            provider = "huggingface"
+            provider = "auto"
 
         # assemble messages
         msgs = history_to_messages(history, system_prompt)
         ctx = query
         if file:
-            txt = extract_text_from_file(file)
-            ctx += f"\n\n[File]\n{txt[:5000]}"
+            ftext = extract_text_from_file(file)
+            ctx += f"\n\n[Attached file]\n{ftext[:5000]}"
         if website_url:
-            txt = extract_website_content(website_url)
-            if not txt.startswith("Error"):
-                ctx += f"\n\n[Website]\n{txt[:8000]}"
+            wtext = extract_website_content(website_url)
+            if not wtext.startswith("Error"):
+                ctx += f"\n\n[Website content]\n{wtext[:8000]}"
         final_q = enhance_query_with_search(ctx, enable_search)
         msgs.append({"role": "user", "content": final_q})
 
+        # call model
         client = get_inference_client(model_id, provider)
         resp = client.chat.completions.create(
             model=model_id,
@@ -107,41 +115,44 @@
         if language == "transformers.js":
             files = parse_transformers_js_output(content)
             code = format_transformers_js_output(files)
-            preview = send_to_sandbox(files.get("index.html",""))
+            preview = send_to_sandbox(files.get("index.html", ""))
         else:
-            clean = remove_code_block(content)
+            cleaned = remove_code_block(content)
             if history and history[-1][1] and not history[-1][1].startswith("❌"):
-                code = apply_search_replace_changes(history[-1][1], clean)
+                code = apply_search_replace_changes(history[-1][1], cleaned)
             else:
-                code = clean
+                code = cleaned
             preview = send_to_sandbox(code) if language == "html" else ""
 
         new_hist = history + [(query, code)]
         chat = history_to_chatbot_messages(new_hist)
         return code, new_hist, preview, chat
 
-# custom CSS
+# --- Custom CSS ---
 CUSTOM_CSS = """
 body { font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, sans-serif; }
-#main_title { text-align: center; font-size: 2.5rem; margin: 1.5rem 0 0.5rem; }
+#main_title { text-align: center; font-size: 2.5rem; margin-top: 1.5rem; }
 #subtitle { text-align: center; color: #4a5568; margin-bottom: 2.5rem; }
-.gradio-container { background: #f7fafc; }
+.gradio-container { background-color: #f7fafc; }
 #gen_btn { box-shadow: 0 4px 6px rgba(0,0,0,0.1); }
 """
 
 with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue"), css=CUSTOM_CSS, title="Shasha AI") as demo:
     history_state = gr.State([])
-    initial = AVAILABLE_MODELS[0]
-    model_state = gr.State(initial)
+    initial_model = AVAILABLE_MODELS[0]
+    model_state = gr.State(initial_model)
 
     gr.Markdown("# 🚀 Shasha AI", elem_id="main_title")
-    gr.Markdown("Your personal AI partner for generating, modifying, and understanding code.", elem_id="subtitle")
+    gr.Markdown("Your AI partner for generating, modifying, and understanding code.", elem_id="subtitle")
 
     with gr.Row():
         with gr.Column(scale=1):
             gr.Markdown("### 1. Select Model")
-            names = [m["name"] for m in AVAILABLE_MODELS]
-            model_dd = gr.Dropdown(names, value=initial["name"], label="AI Model")
+            model_dd = gr.Dropdown(
+                choices=[m["name"] for m in AVAILABLE_MODELS],
+                value=initial_model["name"],
+                label="AI Model"
+            )
 
             gr.Markdown("### 2. Provide Context")
             with gr.Tabs():
@@ -157,28 +168,28 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue"), css=CUSTOM_CSS, title="
             search_chk = gr.Checkbox(label="Enable Web Search")
 
             with gr.Row():
-                clr = gr.Button("Clear Session", variant="secondary")
-                gen = gr.Button("Generate Code", variant="primary", elem_id="gen_btn")
+                clr_btn = gr.Button("Clear Session", variant="secondary")
+                gen_btn = gr.Button("Generate Code", variant="primary", elem_id="gen_btn")
 
         with gr.Column(scale=2):
            with gr.Tabs():
                with gr.Tab("💻 Code"):
-                    code_out = gr.Code(language=lambda: lang_dd.value, interactive=True)
+                    code_out = gr.Code(language=None, interactive=True)
                with gr.Tab("👁️ Live Preview"):
                    preview_out = gr.HTML()
                with gr.Tab("📜 History"):
                    chat_out = gr.Chatbot(type="messages")
 
-    model_dd.change(lambda n: get_model_details(n) or initial, inputs=[model_dd], outputs=[model_state])
+    model_dd.change(lambda n: get_model_details(n) or initial_model, inputs=[model_dd], outputs=[model_state])
 
-    gen.click(
+    gen_btn.click(
         fn=generation_code,
         inputs=[prompt_in, file_in, url_in, model_state, search_chk, lang_dd, history_state],
         outputs=[code_out, history_state, preview_out, chat_out],
     )
 
-    clr.click(
-        fn=lambda: ("", None, "", [], "", "", []),
+    clr_btn.click(
+        lambda: ("", None, "", [], "", "", []),
         outputs=[prompt_in, file_in, url_in, history_state, code_out, preview_out, chat_out],
         queue=False,
  )
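
Note on the new `constants` imports: the widened import block assumes `constants.py` now exports three prompt templates alongside the model list, and that `GENERIC_SYSTEM_PROMPT` contains a `{language}` placeholder, since `generation_code` calls `.format(language=language)` on it. A minimal sketch of the expected shape follows; this is illustrative only — the real `constants.py` is not part of this commit, and the prompt wording and example model entry are placeholders.

```python
# constants.py -- illustrative sketch of what app.py now expects (not the actual file)

HTML_SYSTEM_PROMPT = (
    "You are an expert front-end developer. Return one complete HTML document "
    "in a single fenced code block."
)

TRANSFORMERS_JS_SYSTEM_PROMPT = (
    "You are an expert in transformers.js. Return index.html, index.js and "
    "style.css as separate fenced code blocks."
)

# Must keep the {language} placeholder: app.py formats it per request.
GENERIC_SYSTEM_PROMPT = (
    "You are a helpful AI coding assistant. Generate clean, correct {language} "
    "code and return it in a single fenced code block."
)

# app.py reads m["name"] for the dropdown and current_model["id"] for routing.
AVAILABLE_MODELS = [
    {"name": "DeepSeek V3", "id": "deepseek-ai/DeepSeek-V3-0324"},  # example entry only
]

DEMO_LIST = []
```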
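For reviewers, the revised provider routing can be read in isolation. The helper below simply restates the if/elif chain added in `generation_code`; the `provider_for` name and the example model IDs are illustrative and not part of the commit. The practical change is that unrecognised model IDs now fall through to `"auto"` rather than `"huggingface"`, leaving provider selection to the inference client.

```python
# Illustrative restatement of the routing added in generation_code (hypothetical helper).
def provider_for(model_id: str) -> str:
    if model_id.startswith("openai/") or model_id in {"gpt-4", "gpt-3.5-turbo"}:
        return "openai"
    if model_id.startswith("gemini/") or model_id.startswith("google/"):
        return "gemini"
    if model_id.startswith("fireworks-ai/"):
        return "fireworks-ai"
    return "auto"  # was "huggingface" before this commit

# Example IDs are placeholders for illustration.
assert provider_for("openai/gpt-4o") == "openai"
assert provider_for("google/gemma-2-9b-it") == "gemini"
assert provider_for("fireworks-ai/llama-v3p1-70b-instruct") == "fireworks-ai"
assert provider_for("Qwen/Qwen2.5-Coder-32B-Instruct") == "auto"
```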