mgbam committed (verified)
Commit 6e5998c · Parent(s): a8a6b95

Update app.py

Files changed (1): app.py +30 -35
app.py CHANGED
@@ -21,6 +21,7 @@ from typing import Optional, Dict, List, Tuple, Any
 
 # --- Local Module Imports ---
 # These modules contain the application's configuration, clients, and utility functions.
+# Note: These files (hf_client.py, etc.) must exist in the same directory.
 from constants import SYSTEM_PROMPTS, AVAILABLE_MODELS, DEMO_LIST
 from hf_client import get_inference_client
 from tavily_search import enhance_query_with_search
@@ -36,10 +37,16 @@ from utils import (
 )
 from deploy import send_to_sandbox, load_project_from_url
 
-# --- Type Aliases for Readability ---
+# --- Type Aliases and Constants ---
 History = List[Tuple[str, str]]
 Model = Dict[str, Any]
-
+DEFAULT_SYSTEM_PROMPT = """
+You are a helpful AI coding assistant. Your primary goal is to generate clean, correct, and efficient code based on the user's request.
+- Follow the user's requirements precisely.
+- If the user asks for a specific language, provide the code in that language.
+- Enclose the final code in a single markdown code block (e.g., ```html ... ```).
+- Do not include any conversational text, apologies, or explanations outside of the code block in your final response.
+"""
 
 # ==============================================================================
 # HELPER FUNCTIONS
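The module-level DEFAULT_SYSTEM_PROMPT added above pairs with the dict.get fallback used later in generation_code: an unrecognised language no longer depends on a "default" key existing in SYSTEM_PROMPTS. A minimal sketch of that lookup behaviour; the prompt contents and the pick_system_prompt helper are placeholders, not part of this commit:

# Sketch only: SYSTEM_PROMPTS stands in for the real constants module.
SYSTEM_PROMPTS = {
    "html": "You are an expert front-end developer. Return a single HTML file.",
    "python": "You are an expert Python developer. Return runnable Python code.",
}
DEFAULT_SYSTEM_PROMPT = "You are a helpful AI coding assistant."  # abbreviated

def pick_system_prompt(language: str) -> str:
    # dict.get never raises: unknown languages fall back to the generic prompt.
    return SYSTEM_PROMPTS.get(language, DEFAULT_SYSTEM_PROMPT)

assert pick_system_prompt("python").startswith("You are an expert Python")
assert pick_system_prompt("rust") == DEFAULT_SYSTEM_PROMPT  # no KeyError on a missing key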
@@ -52,7 +59,6 @@ def get_model_details(model_name: str) -> Optional[Model]:
             return model
     return None
 
-
 # ==============================================================================
 # CORE APPLICATION LOGIC
 # ==============================================================================
@@ -69,20 +75,6 @@ def generation_code(
 ) -> Tuple[str, History, str, List[Dict[str, str]]]:
     """
     The main function to handle a user's code generation request.
-
-    Args:
-        query: The user's text prompt.
-        file: An uploaded file for context.
-        website_url: A URL to scrape for context.
-        current_model: The dictionary of the currently selected AI model.
-        enable_search: Flag to enable web search for query enhancement.
-        language: The target programming language.
-        history: The existing conversation history.
-        hf_token: The logged-in user's Hugging Face token for billing.
-
-    Returns:
-        A tuple containing the generated code, updated history, preview HTML,
-        and formatted chatbot messages.
     """
     # 1. --- Initialization and Input Sanitization ---
     query = query or ""
@@ -90,9 +82,19 @@ def generation_code(
 
     try:
         # 2. --- System Prompt and Model Selection ---
-        system_prompt = SYSTEM_PROMPTS.get(language, SYSTEM_PROMPTS["default"])
+        system_prompt = SYSTEM_PROMPTS.get(language, DEFAULT_SYSTEM_PROMPT)
         model_id = current_model["id"]
-        provider = current_model["provider"]
+
+        # Robustly determine the provider based on ID, falling back to a default
+        if model_id.startswith("openai/"):
+            provider = "openai"
+        elif model_id.startswith("gemini/"):
+            provider = "gemini"
+        elif model_id.startswith("fireworks-ai/"):
+            provider = "fireworks-ai"
+        else:
+            # Assume other models are served via standard Hugging Face TGI
+            provider = "huggingface"
 
         # 3. --- Assemble Full Context for the AI ---
         messages = history_to_messages(history, system_prompt)
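The prefix-based provider selection introduced here is easy to pull out and exercise in isolation. A minimal sketch of the same logic as a standalone helper; resolve_provider and the prefix table are illustrative, not part of this commit:

def resolve_provider(model_id: str) -> str:
    """Map a model ID to an inference provider by prefix, defaulting to Hugging Face."""
    prefixes = {
        "openai/": "openai",
        "gemini/": "gemini",
        "fireworks-ai/": "fireworks-ai",
    }
    for prefix, provider in prefixes.items():
        if model_id.startswith(prefix):
            return provider
    return "huggingface"  # assume standard Hugging Face TGI serving otherwise

assert resolve_provider("openai/gpt-4o") == "openai"
assert resolve_provider("meta-llama/Llama-3.1-8B-Instruct") == "huggingface"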
@@ -115,13 +117,12 @@ def generation_code(
         resp = client.chat.completions.create(
             model=model_id,
             messages=messages,
-            max_tokens=16384,  # Increased token limit for complex code
-            temperature=0.1  # Low temperature for more predictable, stable code
+            max_tokens=16384,
+            temperature=0.1
         )
         content = resp.choices[0].message.content
 
     except Exception as e:
-        # If the API call fails, show a user-friendly error in the chat.
         error_message = f"❌ **An error occurred:**\n\n```\n{str(e)}\n```\n\nPlease check your API keys, model selection, or try again."
         history.append((query, error_message))
         return "", history, "", history_to_chatbot_messages(history)
@@ -133,8 +134,8 @@ def generation_code(
             preview_html = send_to_sandbox(files.get('index.html', ''))
         else:
             clean_code = remove_code_block(content)
-            if history and history[-1][1] not in (None, ""):
-                # Apply search/replace if a previous turn exists
+            # Apply search/replace if a previous turn exists and contains valid code
+            if history and history[-1][1] and not history[-1][1].startswith("❌"):
                 code_str = apply_search_replace_changes(history[-1][1], clean_code)
             else:
                 code_str = clean_code
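The tightened guard above keeps apply_search_replace_changes from running against the "❌ …" error string that the except branch stores as the previous assistant turn. A small sketch of the same predicate on its own; the helper name is illustrative:

from typing import List, Tuple

History = List[Tuple[str, str]]

def has_usable_previous_code(history: History) -> bool:
    """True when the last assistant reply exists and is not an error message."""
    if not history:
        return False
    last_reply = history[-1][1]
    return bool(last_reply) and not last_reply.startswith("❌")

assert has_usable_previous_code([("make a page", "<html></html>")]) is True
assert has_usable_previous_code([("make a page", "❌ **An error occurred:** ...")]) is False
assert has_usable_previous_code([]) is False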
@@ -153,10 +154,7 @@ def generation_code(
 
 with gr.Blocks(theme=gr.themes.Soft(), title="AnyCoder - AI Code Generator") as demo:
     # --- State Management ---
-    # Using gr.State to hold non-visible data like conversation history
-    # and the full dictionary of the selected model.
     history_state = gr.State([])
-    # Initialize with the first model from our constants list
     initial_model = AVAILABLE_MODELS[0]
     model_state = gr.State(initial_model)
 
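history_state and model_state carry per-session data between event callbacks without rendering anything. A self-contained illustration of the gr.State round-trip pattern the app relies on; the echo handler is purely for demonstration:

import gradio as gr

def echo(message: str, history: list):
    # State arrives as an input and must be returned for the update to persist.
    history = history + [(message, f"echo: {message}")]
    return history, history  # chatbot display, updated state

with gr.Blocks() as sketch:
    chat = gr.Chatbot()
    state = gr.State([])  # per-session, not shared between users
    box = gr.Textbox(label="Message")
    box.submit(fn=echo, inputs=[box, state], outputs=[chat, state])

# sketch.launch()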
@@ -181,7 +179,7 @@ with gr.Blocks(theme=gr.themes.Soft(), title="AnyCoder - AI Code Generator") as
         )
 
         with gr.Accordion("🛠️ Inputs & Context", open=True):
-            prompt_in = gr.Textbox(label="Prompt", lines=3, placeholder="e.g., 'Create a dark-themed login form with a spinning loader.'")
+            prompt_in = gr.Textbox(label="Prompt", lines=3, placeholder="e.g., 'Create a dark-themed login form.'")
             file_in = gr.File(label="📎 Attach File (Optional)", type="filepath")
             url_site = gr.Textbox(label="🌐 Scrape Website (Optional)", placeholder="https://example.com")
 
@@ -211,19 +209,16 @@ with gr.Blocks(theme=gr.themes.Soft(), title="AnyCoder - AI Code Generator") as
 # EVENT WIRING
 # ==============================================================================
 
-# Update the model_state when the user selects a new model from the dropdown.
 def on_model_change(model_name: str) -> Dict:
+    """Updates the model_state when the user selects a new model."""
     model_details = get_model_details(model_name)
     return model_details or initial_model
 model_dd.change(fn=on_model_change, inputs=[model_dd], outputs=[model_state])
 
-# Update the syntax highlighting when the language changes.
 language_dd.change(fn=lambda lang: gr.Code(language=lang), inputs=[language_dd], outputs=[code_out])
 
-# The main event listener for the "Generate" button.
 gen_btn.click(
     fn=generation_code,
-    # Note: `hf_token` is passed automatically by Gradio and is not listed here.
     inputs=[
         prompt_in, file_in, url_site,
         model_state, search_chk, language_dd, history_state
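on_model_change falls back to initial_model when the dropdown value cannot be matched, so model_state always holds a full model dict. A sketch of that lookup-plus-fallback, with a stand-in AVAILABLE_MODELS list and the get_model_details body reconstructed from the tail visible in the diff (both are assumptions, not the real constants module):

from typing import Any, Dict, List, Optional

Model = Dict[str, Any]

# Stand-in for constants.AVAILABLE_MODELS; names and IDs are illustrative.
AVAILABLE_MODELS: List[Model] = [
    {"name": "Llama 3.1 8B", "id": "meta-llama/Llama-3.1-8B-Instruct"},
    {"name": "GPT-4o", "id": "openai/gpt-4o"},
]

def get_model_details(model_name: str) -> Optional[Model]:
    # Assumed implementation: linear scan by display name, None on a miss.
    for model in AVAILABLE_MODELS:
        if model["name"] == model_name:
            return model
    return None

# The dropdown handler keeps a valid model dict even when the lookup misses:
initial_model = AVAILABLE_MODELS[0]
selected = get_model_details("nonexistent") or initial_model
assert selected is initial_model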
@@ -231,12 +226,13 @@ gen_btn.click(
     outputs=[code_out, history_state, preview_out, chat_out]
 )
 
-# Clear button functionality to reset the interface.
 def clear_session():
+    """Resets the UI components and state to their initial values."""
     return "", [], "", [], None, ""
 clr_btn.click(
     fn=clear_session,
-    outputs=[prompt_in, history_state, preview_out, chat_out, file_in, url_site]
+    outputs=[prompt_in, history_state, preview_out, chat_out, file_in, url_site],
+    queue=False
 )
 
 
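Adding queue=False lets the clear handler bypass the request queue, so the interface resets immediately even while a long generation is pending. A toy illustration of the same pattern; slow_task is a placeholder for the real model call:

import time
import gradio as gr

def slow_task(prompt: str) -> str:
    time.sleep(10)  # stands in for a long model call
    return f"result for: {prompt}"

def clear() -> tuple:
    return "", ""

with gr.Blocks() as sketch:
    prompt = gr.Textbox(label="Prompt")
    output = gr.Textbox(label="Output")
    run_btn = gr.Button("Run")
    clear_btn = gr.Button("Clear")

    run_btn.click(fn=slow_task, inputs=[prompt], outputs=[output])        # goes through the queue
    clear_btn.click(fn=clear, outputs=[prompt, output], queue=False)      # bypasses the queue

# sketch.queue().launch()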
@@ -245,5 +241,4 @@ clr_btn.click(
 # ==============================================================================
 
 if __name__ == '__main__':
-    # Launch the Gradio app with queuing enabled for handling multiple users.
     demo.queue().launch()
 