Ali2206 committed on
Commit
15df552
·
verified ·
1 Parent(s): e29c534

Update ui/ui_core.py

Browse files
Files changed (1) hide show
  1. ui/ui_core.py +21 -19
ui/ui_core.py CHANGED
@@ -108,10 +108,10 @@ def create_ui(agent: TxAgent):
108
  )
109
 
110
  try:
111
- # Show loading status
112
- yield gr.update(visible=True), history
113
 
114
- # Extract file text
115
  extracted_text = ""
116
  if uploaded_files and isinstance(uploaded_files, list):
117
  total_files = len(uploaded_files)
@@ -127,22 +127,20 @@ def create_ui(agent: TxAgent):
127
  else:
128
  extracted_text += f"(Uploaded file: {os.path.basename(path)})\n"
129
  except Exception as file_error:
130
- extracted_text += f"[Error processing file: {os.path.basename(path)}] {str(file_error)}\n"
131
- continue
132
 
133
- # Sanitize and chunk
134
  sanitized = sanitize_utf8(extracted_text.strip())
135
  chunks = chunk_text(sanitized, max_tokens=8192)
136
 
137
  all_responses = ""
138
-
139
  for i, chunk in enumerate(chunks):
140
  full_message = (
141
  f"{context}\n\n--- Uploaded File Chunk {i+1}/{len(chunks)} ---\n\n{chunk}\n\n--- End of Chunk ---\n\nNow begin your reasoning:"
142
  )
143
  generator = agent.run_gradio_chat(
144
  message=full_message,
145
- history=history,
146
  temperature=0.3,
147
  max_new_tokens=1024,
148
  max_token=8192,
@@ -153,20 +151,24 @@ def create_ui(agent: TxAgent):
153
  )
154
  for update in generator:
155
  if isinstance(update, str):
156
- all_responses += update # collect only final string output
157
 
158
  all_responses = sanitize_utf8(all_responses.strip())
159
 
160
- # Convert to gr.Chatbot message format
161
- history.append({"role": "user", "content": message})
162
- history.append({"role": "assistant", "content": all_responses})
163
- yield gr.update(visible=False), history
164
-
165
- except Exception as chat_error:
166
- print(f"Chat handling error: {chat_error}")
167
- history.append({"role": "user", "content": message})
168
- history.append({"role": "assistant", "content": "❌ An error occurred while processing your request. Please try again."})
169
- yield gr.update(visible=False), history
 
 
 
 
170
 
171
  inputs = [message_input, chatbot, conversation_state, file_upload]
172
  send_button.click(fn=handle_chat, inputs=inputs, outputs=[status_box, chatbot])
 
108
  )
109
 
110
  try:
111
+ # Show loading centered
112
+ yield [{"role": "assistant", "content": "⏳ Processing... Please wait while I analyze the files."}]
113
 
114
+ # Step 1: Extract files
115
  extracted_text = ""
116
  if uploaded_files and isinstance(uploaded_files, list):
117
  total_files = len(uploaded_files)
 
127
  else:
128
  extracted_text += f"(Uploaded file: {os.path.basename(path)})\n"
129
  except Exception as file_error:
130
+ extracted_text += f"[Error processing {os.path.basename(path)}]: {str(file_error)}\n"
 
131
 
132
+ # Step 2: Chunk and sanitize
133
  sanitized = sanitize_utf8(extracted_text.strip())
134
  chunks = chunk_text(sanitized, max_tokens=8192)
135
 
136
  all_responses = ""
 
137
  for i, chunk in enumerate(chunks):
138
  full_message = (
139
  f"{context}\n\n--- Uploaded File Chunk {i+1}/{len(chunks)} ---\n\n{chunk}\n\n--- End of Chunk ---\n\nNow begin your reasoning:"
140
  )
141
  generator = agent.run_gradio_chat(
142
  message=full_message,
143
+ history=[],
144
  temperature=0.3,
145
  max_new_tokens=1024,
146
  max_token=8192,
 
151
  )
152
  for update in generator:
153
  if isinstance(update, str):
154
+ all_responses += update
155
 
156
  all_responses = sanitize_utf8(all_responses.strip())
157
 
158
+ # Final messages
159
+ final_history = history + [
160
+ {"role": "user", "content": message},
161
+ {"role": "assistant", "content": all_responses}
162
+ ]
163
+ yield final_history
164
+
165
+ except Exception as e:
166
+ print(f"[TxAgent Error] {e}")
167
+ final_history = history + [
168
+ {"role": "user", "content": message},
169
+ {"role": "assistant", "content": "⚠️ An error occurred while processing the files."}
170
+ ]
171
+ yield final_history
172
 
173
  inputs = [message_input, chatbot, conversation_state, file_upload]
174
  send_button.click(fn=handle_chat, inputs=inputs, outputs=[status_box, chatbot])