SUMANA SUMANAKUL (ING) committed on
Commit 474fb03 · 1 Parent(s): b49116e

fix requirements

Files changed (1)
  1. app.py +294 -70
app.py CHANGED
@@ -5,107 +5,331 @@ import gradio as gr
  import uuid
  from utils.chat import ChatLaborLaw

- # Function to initialize a new session and create chatbot instance for that session
- async def initialize_session():
-     session_id = str(uuid.uuid4())[:8]
-     chatbot = ChatLaborLaw()
-     # chatbot = Chat("gemini-2.0-flash")
-     history = []
-     return "", session_id, chatbot, history


- # Function to handle user input and chatbot response
- async def chat_function(prompt, history, session_id, chatbot):
-     if chatbot is None:
-         return history, "", session_id, chatbot  # Skip if chatbot not ready
-
-     # Append the user's input to the message history
-     history.append({"role": "user", "content": prompt})
-
-     # Get the response from the chatbot
-     response = await chatbot.chat(prompt)  # await can now be used here
-
-     # Append the assistant's response to the message history
-     history.append({"role": "assistant", "content": response})
-
-     return history, "", session_id, chatbot


- # Function to save feedback with chat history
- async def send_feedback(feedback, history, session_id, chatbot):
-     os.makedirs("app/feedback", exist_ok=True)
-     filename = f"app/feedback/feedback_{session_id}.txt"
      with open(filename, "a", encoding="utf-8") as f:
-         f.write("=== Feedback Received ===\n")
-         f.write(f"Session ID: {session_id}\n")
-         f.write(f"Feedback: {feedback}\n")
-         f.write("Chat History:\n")
-         for msg in history:
-             f.write(f"{msg['role']}: {msg['content']}\n")
          f.write("\n--------------------------\n\n")
-     return ""  # Clear feedback input


- # Create the Gradio interface
  with gr.Blocks(theme=gr.themes.Soft(primary_hue="amber")) as demo:
      gr.Markdown("# สอบถามเรื่องกฎหมายแรงงาน")

-     # Initialize State
-     session_state = gr.State()
-     chatbot_instance = gr.State()
-     chatbot_history = gr.State([])

-     # Chat UI
-     chatbot_interface = gr.Chatbot(type="messages", label="Chat History")
-     user_input = gr.Textbox(placeholder="Type your message here...", elem_id="user_input", lines=1)
-
-     submit_button = gr.Button("Send")
-     clear_button = gr.Button("Delete Chat History")

-     # Submit actions
      submit_button.click(
-         fn=chat_function,
-         inputs=[user_input, chatbot_history, session_state, chatbot_instance],
-         outputs=[chatbot_interface, user_input, session_state, chatbot_instance]
      )
-
      user_input.submit(
-         fn=chat_function,
-         inputs=[user_input, chatbot_history, session_state, chatbot_instance],
-         outputs=[chatbot_interface, user_input, session_state, chatbot_instance]
      )
-
-     # # Clear history
-     # clear_button.click(lambda: [], outputs=chatbot_interface)
      clear_button.click(
          fn=initialize_session,
          inputs=[],
-         outputs=[user_input, session_state, chatbot_instance, chatbot_history]
-     ).then(
-         fn=lambda: gr.update(value=[]),
-         inputs=[],
-         outputs=chatbot_interface
      )
-
-
-     # Feedback section
-     with gr.Row():
-         feedback_input = gr.Textbox(placeholder="Send us feedback...", label="Feedback")
-         send_feedback_button = gr.Button("Send Feedback")
-
      send_feedback_button.click(
          fn=send_feedback,
-         inputs=[feedback_input, chatbot_history, session_state, chatbot_instance],
-         outputs=[feedback_input]
      )

-     # Initialize session on load
      demo.load(
          fn=initialize_session,
          inputs=[],
-         outputs=[user_input, session_state, chatbot_instance, chatbot_history]
      )

- # Launch
- demo.launch(share=True)

  import uuid
  from utils.chat import ChatLaborLaw

+ # ==============================================================================
+ # 1. GLOBAL INITIALIZATION (runs once when the app starts)
+ # ==============================================================================
+ # --- Langfuse Handler ---
+ LANGFUSE_HANDLER = CallbackHandler()

+ # --- LLM Initialization ---
+ # (edit this section to choose which model is used as the default)
+ MODEL_NAME_LLM = "jai-chat-1-3-2"
+ TEMPERATURE = 0

+ if MODEL_NAME_LLM == "jai-chat-1-3-2":
+     LLM_MAIN = ChatOpenAI(
+         model=MODEL_NAME_LLM,
+         api_key=os.getenv("JAI_API_KEY"),
+         base_url=os.getenv("CHAT_BASE_URL"),
+         temperature=TEMPERATURE,
+         max_tokens=2048,
+         max_retries=2,
+         seed=13
+     )
+ elif MODEL_NAME_LLM == "gemini-2.0-flash":
+     LLM_MAIN = ChatGoogleGenerativeAI(
+         model="gemini-1.5-flash",
+         google_api_key=os.getenv("GOOGLE_API_KEY"),
+         temperature=TEMPERATURE,
+         max_output_tokens=2048,
+         convert_system_message_to_human=True,
+     )
+ else:
+     raise ValueError(f"Unsupported LLM model '{MODEL_NAME_LLM}'.")

+ # --- Database and Retriever Initialization ---
+ MONGO_CONNECTION_STR = os.getenv('MONGO_CONNECTION_STRING')
+ MONGO_DATABASE = os.getenv('MONGO_DATABASE')
+ MONGO_COLLECTION = os.getenv('MONGO_COLLECTION')
+
+ MONGO_CLIENT = MongoClient(MONGO_CONNECTION_STR)
+ DB = MONGO_CLIENT[MONGO_DATABASE]
+ MONGO_COLLECTION_INSTANCE = DB[MONGO_COLLECTION]
+ RETRIEVER = RerankRetriever()
+
+ print("Global objects initialized successfully.")
+
+ # ==============================================================================
+ # 2. HELPER FUNCTIONS (converted from the class methods)
+ # ==============================================================================
+
+ def format_main_context(list_of_documents):
+     formatted_docs = []
+     for i, doc in enumerate(list_of_documents):
+         metadata = doc.metadata
+         formatted = f"Doc{i}\n{metadata.get('law_name', '-')}\nมาตรา\t{metadata.get('section_number', '-')}\n{doc.page_content}\nประกาศ\t{metadata.get('publication_date', '-')}\nเริ่มใช้\t{metadata.get('effective_date', '-')}"
+         formatted_docs.append(formatted)
+     return "\n\n".join(formatted_docs)

+ def format_ref_context(list_of_docs):
+     formatted_ref_docs = []
+     for i, doc in enumerate(list_of_docs):
+         formatted = f"{doc.get('law_name', '-')}\nมาตรา\t{doc.get('section_number', '-')}\n{doc.get('text', '-')}"
+         formatted_ref_docs.append(formatted)
+     return "\n\n".join(formatted_ref_docs)
+
+ def get_main_context(user_query, **kwargs):
+     compression_retriever = RETRIEVER.get_compression_retriever(**kwargs)
+     return compression_retriever.invoke(user_query)
+
+ def get_ref_context(main_context_docs):
+     all_reference_ids = set()
+     for context in main_context_docs:
+         references_list = context.metadata.get('references', [])
+         if isinstance(references_list, list):
+             for ref_str in references_list:
+                 all_reference_ids.add(ref_str.replace("มาตรา", "").strip())

+     if not all_reference_ids:
+         return []
+
+     mongo_query = {"law_type": "summary", "section_number": {"$in": list(all_reference_ids)}}
+     projection = {"text": 1, "law_name": 1, "section_number": 1}
+     return list(MONGO_COLLECTION_INSTANCE.find(mongo_query, projection))

+ # ==============================================================================
+ # 3. CORE LOGIC FUNCTIONS (RAG / Non-RAG)
+ # ==============================================================================
+
+ async def call_rag(user_input: str, langchain_history: list) -> str:
+     context_docs = get_main_context(user_input, law_type="summary")
+     main_context_str = format_main_context(context_docs)
+
+     ref_context_docs = get_ref_context(context_docs)
+     ref_context_str = format_ref_context(ref_context_docs) if ref_context_docs else "-"
+
+     rag_input_data = {
+         "question": user_input,
+         "main_context": main_context_str,
+         "ref_context": ref_context_str,
+         "history": langchain_history
+     }

+     try:
+         prompt_messages = RAG_CHAT_PROMPT.format_messages(**rag_input_data)
+         response = await LLM_MAIN.ainvoke(prompt_messages, config={"callbacks": [LANGFUSE_HANDLER]})
+         clean_response = re.sub(r"<[^>]+>|#+", "", response.content).strip()
+         return clean_response
+     except Exception as e:
+         print(f"Error during RAG LLM call: {e}")
+         return "ขออภัย ระบบขัดข้องขณะประมวลผลคำตอบ"

+ async def call_non_rag(user_input: str) -> str:
+     prompt_messages = NON_RAG_PROMPT.format(user_input=user_input)
+     response = await LLM_MAIN.ainvoke(prompt_messages, config={"callbacks": [LANGFUSE_HANDLER]})
+     return response.content.strip() if response and response.content else "ขออภัย ระบบไม่สามารถตอบคำถามได้ในขณะนี้"

+ # ==============================================================================
+ # 4. GRADIO EVENT HANDLERS
+ # ==============================================================================
+
+ def initialize_session():
+     """Reset all state for a new session."""
+     session_id = str(uuid.uuid4())[:8]
+     return "", session_id, [], []  # user_input, session_id, ui_history, langchain_history
+
+ async def chat_orchestrator(prompt: str, ui_history: list, langchain_history: list):
+     """
+     Main function that orchestrates the whole conversation.
+     """
+     if not prompt.strip():
+         return ui_history, langchain_history, ""
+
+     # 1. Update the Langchain history with the new message
+     langchain_history.append(HumanMessage(content=prompt))
+
+     # 2. Classify the input type
+     try:
+         history_content_list = [msg.content for msg in langchain_history]
+         input_type = classify_input_type(prompt, history=history_content_list)
+     except Exception as e:
+         print(f"Error classifying input type: {e}. Defaulting to Non-RAG.")
+         input_type = "Non-RAG"
+
+     # 3. Run the appropriate flow
+     if input_type == "RAG":
+         ai_response = await call_rag(prompt, langchain_history)
+     else:
+         ai_response = await call_non_rag(prompt)
+
+     # 4. Update both history formats
+     langchain_history.append(AIMessage(content=ai_response))
+     ui_history.append((prompt, ai_response))
+
+     # 5. Return values to update the UI and state
+     return ui_history, langchain_history, ""  # ui_history, langchain_history, user_input (cleared)
+
+ def send_feedback(feedback: str, history: list, session_id: str):
+     """Save feedback."""
+     if not feedback.strip(): return ""
+     os.makedirs("feedback", exist_ok=True)
+     filename = f"feedback/feedback_{session_id}.txt"
      with open(filename, "a", encoding="utf-8") as f:
+         f.write(f"=== Feedback Received ===\nSession ID: {session_id}\nFeedback: {feedback}\nChat History:\n")
+         for user_msg, assistant_msg in history:
+             f.write(f"User: {user_msg}\nAssistant: {assistant_msg}\n")
          f.write("\n--------------------------\n\n")
+     gr.Info("ขอบคุณสำหรับข้อเสนอแนะ!")
+     return ""

+ # ==============================================================================
+ # 5. GRADIO UI DEFINITION
+ # ==============================================================================

  with gr.Blocks(theme=gr.themes.Soft(primary_hue="amber")) as demo:
      gr.Markdown("# สอบถามเรื่องกฎหมายแรงงาน")

+     # --- States ---
+     # session_id_state: stores the ID of the current session
+     # langchain_history_state: stores the conversation history as Langchain messages (HumanMessage, AIMessage)
+     session_id_state = gr.State()
+     langchain_history_state = gr.State([])

+     # --- UI Components ---
+     chatbot_interface = gr.Chatbot(label="ประวัติการสนทนา", height=550, bubble_styling=False, show_copy_button=True)
+     user_input = gr.Textbox(placeholder="พิมพ์คำถามของคุณที่นี่...", label="คำถาม", lines=2)
+     with gr.Row():
+         submit_button = gr.Button("ส่ง", variant="primary", scale=4)
+         clear_button = gr.Button("เริ่มการสนทนาใหม่", scale=1)

+     # --- Event Wiring ---
      submit_button.click(
+         fn=chat_orchestrator,
+         inputs=[user_input, chatbot_interface, langchain_history_state],
+         outputs=[chatbot_interface, langchain_history_state, user_input]
      )
      user_input.submit(
+         fn=chat_orchestrator,
+         inputs=[user_input, chatbot_interface, langchain_history_state],
+         outputs=[chatbot_interface, langchain_history_state, user_input]
      )
      clear_button.click(
          fn=initialize_session,
          inputs=[],
+         outputs=[user_input, session_id_state, chatbot_interface, langchain_history_state],
+         queue=False
      )
+
+     with gr.Accordion("ส่งข้อเสนอแนะ (Feedback)", open=False):
+         feedback_input = gr.Textbox(placeholder="ความคิดเห็นของคุณมีความสำคัญต่อการพัฒนาของเรา...", label="Feedback", lines=2, scale=4)
+         send_feedback_button = gr.Button("ส่ง Feedback")
+
      send_feedback_button.click(
          fn=send_feedback,
+         inputs=[feedback_input, chatbot_interface, session_id_state],
+         outputs=[feedback_input],
+         queue=False
      )

      demo.load(
          fn=initialize_session,
          inputs=[],
+         outputs=[user_input, session_id_state, chatbot_interface, langchain_history_state]
      )

+ demo.queue().launch()
+
+
+ # # Function to initialize a new session and create chatbot instance for that session
+ # async def initialize_session():
+ #     session_id = str(uuid.uuid4())[:8]
+ #     chatbot = ChatLaborLaw()
+ #     # chatbot = Chat("gemini-2.0-flash")
+ #     history = []
+ #     return "", session_id, chatbot, history
+
+
+ # # Function to handle user input and chatbot response
+ # async def chat_function(prompt, history, session_id, chatbot):
+ #     if chatbot is None:
+ #         return history, "", session_id, chatbot  # Skip if chatbot not ready
+
+ #     # Append the user's input to the message history
+ #     history.append({"role": "user", "content": prompt})
+
+ #     # Get the response from the chatbot
+ #     response = await chatbot.chat(prompt)  # await can now be used here
+
+ #     # Append the assistant's response to the message history
+ #     history.append({"role": "assistant", "content": response})
+
+ #     return history, "", session_id, chatbot
+
+
+ # # Function to save feedback with chat history
+ # async def send_feedback(feedback, history, session_id, chatbot):
+ #     os.makedirs("app/feedback", exist_ok=True)
+ #     filename = f"app/feedback/feedback_{session_id}.txt"
+ #     with open(filename, "a", encoding="utf-8") as f:
+ #         f.write("=== Feedback Received ===\n")
+ #         f.write(f"Session ID: {session_id}\n")
+ #         f.write(f"Feedback: {feedback}\n")
+ #         f.write("Chat History:\n")
+ #         for msg in history:
+ #             f.write(f"{msg['role']}: {msg['content']}\n")
+ #         f.write("\n--------------------------\n\n")
+ #     return ""  # Clear feedback input
+
+
+ # # Create the Gradio interface
+ # with gr.Blocks(theme=gr.themes.Soft(primary_hue="amber")) as demo:
+ #     gr.Markdown("# สอบถามเรื่องกฎหมายแรงงาน")
+
+ #     # Initialize State
+ #     session_state = gr.State()
+ #     chatbot_instance = gr.State()
+ #     chatbot_history = gr.State([])
+
+ #     # Chat UI
+ #     chatbot_interface = gr.Chatbot(type="messages", label="Chat History")
+ #     user_input = gr.Textbox(placeholder="Type your message here...", elem_id="user_input", lines=1)
+
+ #     submit_button = gr.Button("Send")
+ #     clear_button = gr.Button("Delete Chat History")
+
+ #     # Submit actions
+ #     submit_button.click(
+ #         fn=chat_function,
+ #         inputs=[user_input, chatbot_history, session_state, chatbot_instance],
+ #         outputs=[chatbot_interface, user_input, session_state, chatbot_instance]
+ #     )
+
+ #     user_input.submit(
+ #         fn=chat_function,
+ #         inputs=[user_input, chatbot_history, session_state, chatbot_instance],
+ #         outputs=[chatbot_interface, user_input, session_state, chatbot_instance]
+ #     )
+
+ #     # # Clear history
+ #     # clear_button.click(lambda: [], outputs=chatbot_interface)
+ #     clear_button.click(
+ #         fn=initialize_session,
+ #         inputs=[],
+ #         outputs=[user_input, session_state, chatbot_instance, chatbot_history]
+ #     ).then(
+ #         fn=lambda: gr.update(value=[]),
+ #         inputs=[],
+ #         outputs=chatbot_interface
+ #     )
+
+
+ #     # Feedback section
+ #     with gr.Row():
+ #         feedback_input = gr.Textbox(placeholder="Send us feedback...", label="Feedback")
+ #         send_feedback_button = gr.Button("Send Feedback")
+
+ #     send_feedback_button.click(
+ #         fn=send_feedback,
+ #         inputs=[feedback_input, chatbot_history, session_state, chatbot_instance],
+ #         outputs=[feedback_input]
+ #     )
+
+ #     # Initialize session on load
+ #     demo.load(
+ #         fn=initialize_session,
+ #         inputs=[],
+ #         outputs=[user_input, session_state, chatbot_instance, chatbot_history]
+ #     )
+
+ #     # Launch
+ #     demo.launch(share=True)