acecalisto3 commited on
Commit
2bb5759
·
verified ·
1 Parent(s): 307ce47

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +106 -28
app.py CHANGED
@@ -10,6 +10,7 @@ import gradio as gr
10
  from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
11
  from huggingface_hub import InferenceClient, cached_download, Repository, HfApi
12
  from IPython.display import display, HTML
 
13
 
14
  # --- Configuration ---
15
  VERBOSE = True
@@ -150,7 +151,7 @@ def preview(project_path: str = DEFAULT_PROJECT_PATH):
150
 
151
  def main():
152
  with gr.Blocks() as demo:
153
- gr.Markdown("## FragMixt: Your Hugging Face No-Code App Builder")
154
 
155
  # --- Model Selection ---
156
  with gr.Tab("Model"):
@@ -226,33 +227,110 @@ def main():
226
  def run_chat(purpose: str, message: str, agent_name: str, sys_prompt: str, temperature: float, max_new_tokens: int, top_p: float, repetition_penalty: float, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
227
  if not current_model:
228
  return [(history, history), "Please load a model first."]
229
- response = generate_response(message, history, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty)
230
- history.append((message, response))
231
- return history, history
232
-
233
- submit_button.click(run_chat, inputs=[purpose, message, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty, history], outputs=[chatbot, history])
234
-
235
- # --- Project Management ---
236
- with gr.Tab("Project"):
237
- project_name = gr.Textbox(label="Project Name", placeholder="MyHuggingFaceApp")
238
- create_project_button = gr.Button("Create Hugging Face Project")
239
- project_output = gr.Textbox(label="Output", lines=5)
240
- file_content = gr.Code(label="File Content", language="python", lines=20)
241
- file_path = gr.Textbox(label="File Path (relative to project)", placeholder="src/main.py")
242
- read_button = gr.Button("Read File")
243
- write_button = gr.Button("Write to File")
244
- command_input = gr.Textbox(label="Terminal Command", placeholder="pip install -r requirements.txt")
245
- command_output = gr.Textbox(label="Command Output", lines=5)
246
- run_command_button = gr.Button("Run Command")
247
- preview_button = gr.Button("Preview Project")
248
-
249
- create_project_button.click(create_project, inputs=[project_name], outputs=project_output)
250
- read_button.click(read_file, inputs=file_path, outputs=file_content)
251
- write_button.click(write_file, inputs=[file_path, file_content], outputs=project_output)
252
- run_command_button.click(run_command, inputs=command_input, outputs=command_output)
253
- preview_button.click(preview, outputs=project_output)
254
-
255
- demo.launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
256
 
257
  if __name__ == "__main__":
258
  main()
 
10
  from transformers import AutoModelForCausalLM, AutoTokenizer, pipeline
11
  from huggingface_hub import InferenceClient, cached_download, Repository, HfApi
12
  from IPython.display import display, HTML
13
+ import streamlit.components.v1 as components
14
 
15
  # --- Configuration ---
16
  VERBOSE = True
 
151
 
152
  def main():
153
  with gr.Blocks() as demo:
154
+ gr.Markdown("## IDEvIII: Your Hugging Face No-Code App Builder")
155
 
156
  # --- Model Selection ---
157
  with gr.Tab("Model"):
 
227
  def run_chat(purpose: str, message: str, agent_name: str, sys_prompt: str, temperature: float, max_new_tokens: int, top_p: float, repetition_penalty: float, history: List[Tuple[str, str]]) -> Tuple[List[Tuple[str, str]], List[Tuple[str, str]]]:
228
  if not current_model:
229
  return [(history, history), "Please load a model first."]
230
+
231
def generate_response(message, history, agent_name, sys_prompt, temperature, max_new_tokens, top_p, repetition_penalty):
    """Build a chat-style conversation from history and generate a reply.

    Args:
        message: The new user message to respond to.
        history: List of (user_message, assistant_response) tuples.
        agent_name: Name of the active agent (currently unused here).
        sys_prompt: System prompt placed at the head of the conversation.
        temperature / max_new_tokens / top_p / repetition_penalty:
            Sampling parameters forwarded to the model.

    Returns:
        The stripped response text, or an error string if no model is loaded.
    """
    if not current_model:
        return "Please load a model first."

    conversation = [{"role": "system", "content": sys_prompt}]
    # BUG FIX: the loop previously unpacked into `message, response`,
    # shadowing the `message` parameter — with a non-empty history the
    # final user turn appended the *last history message* instead of the
    # new one. Distinct loop names preserve the parameter.
    for user_turn, assistant_turn in history:
        conversation.append({"role": "user", "content": user_turn})
        conversation.append({"role": "assistant", "content": assistant_turn})
    conversation.append({"role": "user", "content": message})

    # NOTE(review): assumes `current_model.generate` accepts a chat-format
    # message list and returns an object with a `.text` attribute — this is
    # not the raw `transformers` generate() signature; confirm the wrapper
    # used for `current_model`.
    response = current_model.generate(
        conversation,
        max_new_tokens=max_new_tokens,
        temperature=temperature,
        top_p=top_p,
        repetition_penalty=repetition_penalty,
    )
    return response.text.strip()
250
+
251
def create_project(project_name):
    """Create a new repository named *project_name* on the Hugging Face Hub.

    Seeds the repo with a minimal README. Returns a human-readable status
    string in both the success and failure cases (the Gradio UI displays it
    directly), so this function never raises.
    """
    try:
        # BUG FIX: `HfFolder` only manages the locally-saved token — it has
        # no `create_repo`, and repo objects have no `save_data`. Repo
        # creation and file upload live on `HfApi`.
        api = HfApi()
        api.create_repo(repo_id=project_name, exist_ok=True)
        api.upload_file(
            path_or_fileobj=f"# {project_name}".encode("utf-8"),
            path_in_repo="README.md",
            repo_id=project_name,
        )
        return f"Created project '{project_name}' on Hugging Face Hub."
    except Exception as e:
        return f"Error creating project: {str(e)}"
259
+
260
def read_file(file_path):
    """Return the text contents of *file_path*.

    On a missing path or a read failure, returns a descriptive error
    string instead of raising, so the Gradio UI can display it as-is.
    """
    if not os.path.exists(file_path):
        return f"File '{file_path}' does not exist."
    try:
        with open(file_path, "r") as handle:
            return handle.read()
    except Exception as exc:
        return f"Error reading file '{file_path}': {str(exc)}"
270
+
271
def write_file(file_path, file_content):
    """Overwrite *file_path* with *file_content*.

    Returns a success message, or a descriptive error string if the
    write fails (the Gradio UI shows the return value directly).
    """
    try:
        with open(file_path, "w") as handle:
            handle.write(file_content)
    except Exception as exc:
        return f"Error writing to file '{file_path}': {str(exc)}"
    return f"Wrote to file '{file_path}' successfully."
278
+
279
def run_command(command):
    """Run *command* in a shell and return its output as text.

    Returns stdout on success; on a non-zero exit code or an execution
    error, returns a descriptive message (never raises).
    """
    # SECURITY(review): shell=True executes arbitrary shell text. That is
    # the point of this "terminal" tab for a trusted local operator, but it
    # must never be exposed to untrusted input.
    try:
        completed = subprocess.run(command, shell=True, capture_output=True, text=True)
    except Exception as exc:
        return f"Error running command '{command}': {str(exc)}"
    if completed.returncode == 0:
        return completed.stdout
    return f"Command '{command}' failed with exit code {completed.returncode}:\n{completed.stderr}"
288
+
289
+
290
def preview():
    """Copy the project to a temp dir, launch a Streamlit preview of it,
    and return the preview URL (or an error string).

    The copy isolates the running preview from further edits; the temp
    directory is removed and the caller's working directory restored in
    all cases.
    """
    cwd = os.getcwd()
    temp_dir = tempfile.mkdtemp()
    try:
        # BUG FIX: mkdtemp() already created temp_dir, and copytree()
        # refuses an existing destination — dirs_exist_ok=True is required
        # or this raised FileExistsError on every call.
        shutil.copytree(
            cwd,
            temp_dir,
            ignore=shutil.ignore_patterns("__pycache__", "*.pyc"),
            dirs_exist_ok=True,
        )
        os.chdir(temp_dir)

        # Pick the first .py file as the app entry point.
        main_file = next((f for f in os.listdir(".") if f.endswith(".py")), None)
        if main_file is None:
            return "No main Python file found in the project."

        # BUG FIX: subprocess.run(..., check=True) blocks until the
        # Streamlit server exits, so the URL line below was unreachable;
        # also `components.get_url` does not exist in
        # streamlit.components.v1. Launch the server without waiting and
        # report the address.
        subprocess.Popen(["streamlit", "run", main_file])

        # Streamlit's default server port is 8501 — TODO confirm no
        # custom port is configured for this deployment.
        return "http://localhost:8501"
    except Exception as e:
        return f"Error generating preview: {str(e)}"
    finally:
        # BUG FIX: previously chdir(cwd) happened only on the success
        # path, leaving the process stranded in temp_dir after an error.
        os.chdir(cwd)
        # Best-effort cleanup: the detached server may still hold files.
        shutil.rmtree(temp_dir, ignore_errors=True)
326
+
327
+ # Customize the launch settings
328
+ server_name = "0.0.0.0" # Listen on all available network interfaces
329
+ server_port = 7860 # Choose an available port
330
+ share_gradio_link = True # Share a public URL for the app
331
+
332
+ # Launch the interface
333
+ demo.launch(server_name=server_name, server_port=server_port, share=share_gradio_link)
334
 
335
  if __name__ == "__main__":
336
  main()