File size: 25,926 Bytes
69bf965
 
 
3ec547a
 
 
bf1b8a5
462065b
 
bf1b8a5
b1760d3
 
462065b
c2d3e7b
84e3816
 
 
 
 
b1760d3
462065b
b5ab9c7
 
bcb76a7
462065b
9467d9b
 
 
 
 
 
462065b
 
 
 
 
 
 
 
 
9467d9b
 
 
 
 
 
 
 
 
462065b
 
 
 
 
 
 
 
 
9467d9b
 
 
 
 
 
 
462065b
9467d9b
 
462065b
9467d9b
 
 
462065b
9467d9b
 
 
462065b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
bcb76a7
b5ab9c7
 
27915b4
 
 
 
 
 
 
 
 
50e415a
 
69bf965
27915b4
69bf965
84e3816
69bf965
17fb98b
84e3816
c2d3e7b
 
84e3816
b1760d3
462065b
84e3816
 
 
 
 
27915b4
 
 
 
 
 
 
 
 
 
84e3816
 
 
 
 
69bf965
 
 
 
27915b4
9467d9b
3ec547a
9467d9b
3ec547a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
9467d9b
3ec547a
9467d9b
 
ee0fcb3
462065b
1a2a7cc
6d23111
 
 
 
 
 
 
 
 
 
462065b
6d23111
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
36b4705
6d23111
 
36b4705
6d23111
 
 
 
 
 
 
 
 
 
 
 
cb2ef4d
6d23111
462065b
6d23111
 
 
 
 
 
 
 
 
 
 
 
 
81dd094
462065b
6d23111
36b4705
6d23111
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
462065b
1a2a7cc
462065b
6d23111
 
 
5e756bc
174bdd3
5e756bc
 
 
 
 
 
 
eeb973e
 
 
 
 
 
 
 
 
 
 
5e756bc
 
 
 
eeb973e
5e756bc
 
 
 
 
 
 
 
 
462065b
174bdd3
27915b4
174bdd3
462065b
 
174bdd3
462065b
174bdd3
27915b4
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
462065b
bf1b8a5
84e3816
 
bf1b8a5
 
 
9467d9b
84e3816
bf1b8a5
 
9467d9b
84e3816
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1a2a7cc
6fa1483
 
 
6e45461
9314e12
6e45461
c1e4b28
 
5eda752
6e45461
4e9cecb
84e3816
b6b92b6
4e9cecb
66d0014
4e9cecb
66d0014
27915b4
 
 
9314e12
27915b4
9314e12
 
 
134477c
 
 
 
 
 
 
 
27915b4
134477c
 
 
 
 
 
9314e12
134477c
9314e12
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
50e415a
 
 
 
7e11155
50e415a
134477c
 
50e415a
134477c
27915b4
134477c
 
 
 
 
 
 
 
 
4e9cecb
 
84e3816
27915b4
4e9cecb
 
 
 
6e45461
bb800d4
4e9cecb
4626d8e
3fc1194
 
 
 
 
 
 
 
 
 
 
 
84e3816
3fc1194
 
4626d8e
3fc1194
 
53e7e31
4e9cecb
 
53e7e31
4e9cecb
3fc1194
 
4626d8e
 
50e415a
 
 
 
3fc1194
 
 
4626d8e
 
53e7e31
4626d8e
50e415a
4626d8e
 
6a6cbe8
69bf965
462065b
bf1b8a5
 
 
462065b
 
bf1b8a5
9467d9b
462065b
bf1b8a5
 
0a00490
bf1b8a5
9467d9b
462065b
bf1b8a5
 
58c536d
2c2e24e
 
 
6e45461
 
c602184
84e3816
0a00490
 
 
 
462065b
ff62fc2
 
9ce6618
462065b
ff62fc2
 
9467d9b
0a00490
58c536d
9467d9b
84e3816
4e9cecb
 
84e3816
462065b
 
 
 
5eda752
462065b
84e3816
462065b
 
 
84e3816
5eda752
84e3816
 
 
462065b
 
 
 
 
5eda752
462065b
 
5eda752
 
 
462065b
 
 
 
 
 
0fb590d
462065b
 
 
 
826d1eb
69bf965
9467d9b
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
import streamlit as st
import json
import os
import requests
import json
import base64
from datetime import datetime, timedelta
import subprocess
from huggingface_hub import HfApi
from pathlib import Path
from calendar_rag import (
    create_default_config,
    AcademicCalendarRAG,
    PipelineConfig,
    ModelConfig,
    RetrieverConfig,
    CacheConfig,
    ProcessingConfig,
    LocalizationConfig
)
# Custom CSS for enhanced styling
def load_custom_css():
    """Inject the app-wide CSS theme into the Streamlit page.

    Styles the page body, headers, chat-message bubbles and the
    online/offline status badges. Call once near the top of main()
    before rendering any other markdown.

    Fix: `.status-offline` previously used white text on the light-pink
    background, making the "offline" badge unreadable; it now uses a
    dark red (#9B1C1C) to match the light-bg/dark-text pattern of
    `.status-online`.
    """
    st.markdown("""
            <style>
            /* General body styling */
            body {
                font-family: "Arial", sans-serif !important;
                color: #000000 !important;
                background-color: white !important;
                line-height: 1.7 !important;
            }
            
            /* Main container styling */
            .main {
                padding: 2rem;
                color: #000000;
                background-color: white;
            }
            
            /* Headers styling */
            h1 {
                color: #000000;
                font-size: 2.8rem !important;
                font-weight: 700 !important;
                margin-bottom: 1.5rem !important;
                text-align: center;
                padding: 1rem 0;
                border-bottom: 3px solid #1E3A8A;
            }
            
            h3, h4 {
                color: #000000;
                font-weight: 600 !important;
                font-size: 1.6rem !important;
                margin-top: 1.5rem !important;
            }
            
            /* Chat message styling */
            .chat-message {
                padding: 1.5rem;
                border-radius: 10px;
                margin: 1rem 0;
                box-shadow: 0 2px 4px rgba(0, 0, 0, 0.1);
                font-size: 1.1rem !important;
                line-height: 1.6 !important;
                font-family: "Arial", sans-serif !important;
                color: #000000 !important;
            }
            
            .user-message {
                background-color: #F3F4F6 !important;
            }
            
            .assistant-message {
                background-color: #EFF6FF !important;
            }
            
            /* Status indicators */
            .status-indicator {
                padding: 0.5rem 1rem;
                border-radius: 6px;
                font-weight: 500;
                font-size: 1.2rem;
                color: #000000;
            }
            
            .status-online {
                background-color: #DEF7EC;
                color: #03543F;
            }
            
            .status-offline {
                background-color: #FDE8E8;
                color: #9B1C1C;
            }
            </style>
    """, unsafe_allow_html=True)

def clear_conversation_context():
    """Reset the conversational context while leaving the on-screen chat log intact."""
    # Wipe the RAG pipeline's internal turn history, if a pipeline exists.
    pipeline = st.session_state.pipeline if 'pipeline' in st.session_state else None
    if pipeline:
        pipeline.conversation_history = []

    # Drop the rolling context memory used to rebuild pipeline history.
    st.session_state.context_memory = []

    # st.session_state.chat_history is deliberately left alone -- it backs
    # the visible transcript in the UI.
    
def initialize_pipeline():
    """Build an AcademicCalendarRAG pipeline seeded from calendar.json.

    Rehydrates the pipeline's conversation history from
    st.session_state.context_memory when one exists. Returns the ready
    pipeline, or None when initialization fails (error shown in the UI).
    """
    try:
        # API key: environment wins, Streamlit secrets as fallback.
        api_key = os.getenv('OPENAI_API_KEY') or st.secrets['OPENAI_API_KEY']

        rag = AcademicCalendarRAG(create_default_config(api_key))

        try:
            with open("calendar.json", "r", encoding="utf-8") as f:
                raw = json.load(f)
            rag.load_data(raw)
        except FileNotFoundError:
            st.error("calendar.json not found. Please ensure the file exists in the same directory.")
            return None

        # Rebuild alternating user/assistant turns from the saved context.
        if 'context_memory' in st.session_state and st.session_state.context_memory:
            turns = []
            for exchange in st.session_state.context_memory:
                turns.append({"role": "user", "content": exchange["query"]})
                turns.append({"role": "assistant", "content": exchange["response"]})
            rag.conversation_history = turns

        return rag

    except Exception as e:
        st.error(f"Error initializing pipeline: {str(e)}")
        return None


def load_qa_history():
    """Load the QA history list from the GitHub repository.

    Fetches qa_history.json via the GitHub contents API, decodes the
    base64 payload and parses it as JSON.

    Returns:
        list: The parsed history entries, or an empty list when the file
        cannot be fetched or parsed.
    """
    try:
        # GitHub API configuration. SECURITY FIX: the access token was
        # previously hard-coded here (a leaked credential); it must come
        # from the environment instead.
        REPO_OWNER = "jirasaksaimekJijo"
        REPO_NAME = "swu-chat-bot-project"
        FILE_PATH = "qa_history.json"
        GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", "")

        # Set up GitHub API request
        api_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/contents/{FILE_PATH}"
        headers = {"Accept": "application/vnd.github.v3+json"}

        # Authenticated requests get a much higher rate limit; the call
        # still works unauthenticated for a public repository.
        if GITHUB_TOKEN:
            headers["Authorization"] = f"token {GITHUB_TOKEN}"

        response = requests.get(api_url, headers=headers)

        if response.status_code == 200:
            # File content arrives base64-encoded inside the JSON payload.
            content_data = response.json()
            file_content = base64.b64decode(content_data["content"]).decode("utf-8")
            return json.loads(file_content)

        st.warning(f"Failed to fetch QA history: {response.status_code} - {response.reason}")
        # Return empty list if file doesn't exist or can't be accessed
        return []

    except Exception as e:
        st.error(f"Error loading QA history from GitHub: {str(e)}")
        return []

def save_qa_history(history_entry):
    """Persist one QA history entry to GitHub (plus a local backup file).

    Fetches the current qa_history.json from the repository, appends the
    new entry, writes a local copy, then updates (or creates) the file
    through the GitHub contents API.

    Args:
        history_entry: dict with "timestamp", "query" and "answer" keys.

    Returns:
        bool: True when the GitHub update succeeded, False otherwise.
    """
    try:
        # GitHub API configuration. SECURITY FIX: the access token was
        # previously hard-coded here (a leaked credential); it must come
        # from the environment instead.
        REPO_OWNER = "jirasaksaimekJijo"
        REPO_NAME = "swu-chat-bot-project"
        FILE_PATH = "qa_history.json"
        GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", "")

        api_url = f"https://api.github.com/repos/{REPO_OWNER}/{REPO_NAME}/contents/{FILE_PATH}"
        headers = {"Accept": "application/vnd.github.v3+json"}
        # Only attach Authorization when a token is configured; the old
        # code sent the header unconditionally.
        if GITHUB_TOKEN:
            headers["Authorization"] = f"token {GITHUB_TOKEN}"

        # Fetch the existing file (if any) to get its content and blob SHA.
        response = requests.get(api_url, headers=headers)

        history_data = []
        sha = None

        if response.status_code == 200:
            content_data = response.json()
            sha = content_data["sha"]
            try:
                # Decode and parse the existing base64-encoded content.
                file_content = base64.b64decode(content_data["content"]).decode("utf-8")
                if file_content.strip():  # Make sure content is not empty
                    history_data = json.loads(file_content)
                    # The stored document must be a list of entries.
                    if not isinstance(history_data, list):
                        st.warning("Existing history data is not a list. Initializing new list.")
                        history_data = []
            except Exception as e:
                st.warning(f"Error parsing existing history: {e}. Initializing new list.")
        elif response.status_code == 404:
            # File doesn't exist yet -- it will be created below.
            st.info("Creating new QA history file.")
        else:
            st.error(f"Failed to check existing history: {response.status_code} - {response.reason}")

        # Normalize the answer field to a plain string before storing.
        if isinstance(history_entry, dict) and all(key in history_entry for key in ["timestamp", "query", "answer"]):
            if isinstance(history_entry["answer"], dict):
                history_entry["answer"] = history_entry["answer"].get('answer', str(history_entry["answer"]))
            # Document-like objects expose the text via .content.
            elif hasattr(history_entry["answer"], 'content'):
                history_entry["answer"] = history_entry["answer"].content
            else:
                history_entry["answer"] = str(history_entry["answer"])

        history_data.append(history_entry)

        # Local backup so a failed push does not lose the entry.
        try:
            local_path = Path("qa_history.json")
            with open(local_path, "w", encoding="utf-8") as f:
                json.dump(history_data, f, ensure_ascii=False, indent=2)
        except Exception as local_err:
            st.warning(f"Failed to save local backup: {local_err}")

        # Push the updated history back to GitHub (contents API wants base64).
        updated_content = json.dumps(history_data, ensure_ascii=False, indent=2)
        encoded_content = base64.b64encode(updated_content.encode('utf-8')).decode('utf-8')

        data = {
            "message": "Update QA history",
            "content": encoded_content,
        }
        if sha:  # Updating an existing file requires its blob SHA.
            data["sha"] = sha

        update_response = requests.put(api_url, headers=headers, json=data)

        if update_response.status_code in [200, 201]:
            return True
        st.error(f"Failed to update QA history: {update_response.status_code} - {update_response.text}")
        return False

    except Exception as e:
        import traceback
        st.error(f"Error in save_qa_history: {str(e)}")
        st.error(f"Traceback: {traceback.format_exc()}")
        return False
        
def add_to_qa_history(query: str, answer: str):
    """Validate a QA pair, normalize the answer to text, and persist it.

    Returns the stored history entry dict, or None when validation or
    saving fails (a warning/error is surfaced in the UI).
    """
    try:
        # Reject incomplete pairs outright.
        if not query or not answer:
            st.warning("Empty query or answer detected, skipping history update")
            return None

        # Coerce the answer into a plain string, whatever shape it came in.
        if isinstance(answer, dict):
            answer_text = answer.get('answer', str(answer))
        elif hasattr(answer, 'content'):
            # Document-like object: take its text content.
            answer_text = answer.content
        else:
            answer_text = str(answer)

        # Timestamp uses the app's +5h offset convention -- presumably a
        # local-time adjustment; TODO confirm against deployment timezone.
        entry = {
            "timestamp": (datetime.now() + timedelta(hours=5)).strftime("%Y-%m-%dT%H:%M:%S"),
            "query": query,
            "answer": answer_text
        }

        save_qa_history(entry)
        return entry

    except Exception as e:
        st.error(f"Error in add_to_qa_history: {str(e)}")
        return None
        
def add_to_history(role: str, message: str):
    """Append a chat message; assistant replies also persist the QA pair and update context memory."""
    st.session_state.chat_history.append((role, message))

    # Only an assistant reply that follows at least one earlier message
    # completes a question/answer pair worth persisting.
    if role != "assistant" or len(st.session_state.chat_history) < 2:
        return

    # The matching user question is the message just before this reply.
    user_query = st.session_state.chat_history[-2][1]

    # Persist the pair to the external QA history store.
    add_to_qa_history(user_query, message)

    if 'context_memory' not in st.session_state:
        st.session_state.context_memory = []

    # Rich dict replies contribute only their answer text to the context.
    if isinstance(message, dict) and "answer" in message:
        response_content = message["answer"]
    else:
        response_content = message

    st.session_state.context_memory.append({
        "query": user_query,
        "response": response_content,
        "timestamp": (datetime.now() + timedelta(hours=5)).strftime("%Y-%m-%dT%H:%M:%S")
    })

    # Cap the rolling context window at the last 10 exchanges.
    if len(st.session_state.context_memory) > 10:
        st.session_state.context_memory = st.session_state.context_memory[-10:]
    
def display_chat_history():
    """Render every stored chat turn as styled HTML, with reference docs for rich answers."""
    for role, content in st.session_state.chat_history:
        # User turns are always plain text.
        if role == "user":
            st.markdown(f"""
                <div class="chat-message user-message">
                    <strong>🧑 คำถาม:</strong><br>
                    {content}
                </div>
            """, unsafe_allow_html=True)
        elif isinstance(content, dict):
            # Structured assistant reply: answer text plus optional
            # supporting documents shown in an expander.
            assistant_response = content.get('answer', '❌ ไม่มีข้อมูลคำตอบ')
            st.markdown(f"""
                    <div class="chat-message assistant-message">
                        <strong>🤖 คำตอบ:</strong><br>
                        {assistant_response}
                    </div>
                """, unsafe_allow_html=True)

            if content.get('documents'):
                with st.expander("📚 ข้อมูลอ้างอิง", expanded=False):
                    for i, doc in enumerate(content['documents'], 1):
                        st.markdown(f"""
                                <div style="padding: 1rem; background-color: #000000; border-radius: 8px; margin: 0.5rem 0;">
                                    <strong>เอกสารที่ {i}:</strong><br>
                                    {doc.content}
                                </div>
                            """, unsafe_allow_html=True)
        else:
            # Plain-text assistant reply (e.g. an error message).
            st.markdown(f"""
                    <div class="chat-message assistant-message">
                        <strong>🤖 คำตอบ:</strong><br>
                        {content}
                    </div>
                """, unsafe_allow_html=True)

# Module-level guard: ensure the rolling context memory exists before any
# callback or render path touches it.
if 'context_memory' not in st.session_state:
    st.session_state.context_memory = []

def handle_submit(user_query: str):
    """Validate, contextualize and process a user query through the RAG pipeline.

    Records the user turn, builds a conversation-history payload for the
    retriever, rewrites queries that refer back to a previous question,
    then records the assistant's reply. A session-state flag guards the
    processing window and the last-message check suppresses duplicate
    submissions.

    Fix: the exchange is no longer appended to context_memory here --
    add_to_history("assistant", ...) already does that and enforces the
    10-entry cap, so the old extra append stored every exchange twice
    (with inconsistent timestamps and the rewritten query).
    """
    if not user_query:
        st.warning("⚠️ กรุณาระบุคำถาม")
        return

    user_query = user_query.strip()

    # Skip if the exact same query was just submitted (e.g. a double click).
    if not st.session_state.chat_history or st.session_state.chat_history[-1][1] != user_query:
        try:
            st.session_state.processing_query = True

            # Record the user turn first so it is part of the context below.
            add_to_history("user", user_query)

            # Convert the Streamlit chat history into the RAG format,
            # keeping at most the last 11 messages (~5 exchanges) of context.
            rag_conversation_history = []
            history_to_include = st.session_state.chat_history[-11:] if len(st.session_state.chat_history) > 10 else st.session_state.chat_history

            for role, content in history_to_include:
                rag_role = "user" if role == "user" else "assistant"
                # Rich assistant replies are dicts; pass only the answer text.
                if isinstance(content, dict) and "answer" in content:
                    rag_content = content["answer"]
                else:
                    rag_content = content
                rag_conversation_history.append({"role": rag_role, "content": rag_content})

            if 'context_memory' not in st.session_state:
                st.session_state.context_memory = []

            with st.spinner("🔍 กำลังค้นหาคำตอบ..."):
                print(f"Processing query with {len(rag_conversation_history)} context messages")

                # Queries that refer back to an earlier question (Thai or
                # English phrasing) get the previous question inlined so
                # the model sees the needed context explicitly.
                reference_keywords = ["ก่อนหน้านี้", "ก่อนหน้า", "ที่ผ่านมา", "คำถามก่อนหน้า", "คำถามที่แล้ว", 
                                     "previous", "earlier", "before", "last time", "last question"]

                is_reference_question = any(keyword in user_query.lower() for keyword in reference_keywords)

                if is_reference_question and len(rag_conversation_history) >= 3:
                    # Previous user questions, excluding the current pair.
                    previous_questions = [msg["content"] for msg in rag_conversation_history[:-2] 
                                          if msg["role"] == "user"]
                    if previous_questions:
                        prev_question = previous_questions[-1]
                        enhanced_query = f"คำถามนี้อ้างอิงถึงคำถามก่อนหน้า '{prev_question}' โปรดพิจารณาบริบทนี้ในการตอบ: {user_query}"
                        print(f"Enhanced reference query: {enhanced_query}")
                        user_query = enhanced_query

                result = st.session_state.pipeline.process_query(
                    query=user_query, 
                    conversation_history=rag_conversation_history
                )

                # Same response shape main()'s renderer expects.
                response_dict = {
                    "answer": result.get("answer", ""),
                    "documents": result.get("relevant_docs", [])
                }

                # Updates chat history AND context memory (with size cap).
                add_to_history("assistant", response_dict)

        except Exception as e:
            error_msg = f"❌ เกิดข้อผิดพลาด: {str(e)}"
            add_to_history("assistant", error_msg)
            st.error(f"Query processing error: {e}")

        finally:
            st.session_state.processing_query = False
            st.rerun()

def create_chat_input():
    """Render the question form with send and combined clear-history controls."""
    with st.form(key="chat_form", clear_on_submit=True):
        # Styled label rendered separately so the text_input's built-in
        # label can stay empty.
        st.markdown("""
            <label for="query_input" style="font-size: 1.2rem; font-weight: 600; margin-bottom: 1rem; display: block;">
                <span style="color: #ffffff; border-left: 4px solid #ffffff; padding-left: 0.8rem;">
                    โปรดระบุคำถามเกี่ยวกับปฏิทินการศึกษา:
                </span>
            </label>
        """, unsafe_allow_html=True)

        question = st.text_input(
            "",
            key="query_input",
            placeholder="เช่น: วิชาเลือกมีอะไรบ้าง?"
        )

        send_col, clear_col = st.columns([5, 5])

        with send_col:
            send_clicked = st.form_submit_button(
                "📤 ส่งคำถาม",
                type="primary",
                use_container_width=True
            )

        with clear_col:
            clear_clicked = st.form_submit_button(
                "🗑️ ล้างประวัติและบริบทสนทนา",
                type="secondary",
                use_container_width=True
            )

        if send_clicked:
            send_clicked and handle_submit(question)

        if clear_clicked:
            # Drop both the visible transcript and the model-side context.
            st.session_state.chat_history = []
            clear_conversation_context()
            st.info("ล้างประวัติและบริบทสนทนาแล้ว")
            st.rerun()

def main():
    """App entry point: configure the page, boot the pipeline, and lay out the UI.

    Layout: a 7/3 column split -- chat transcript plus input form on the
    left, an "about" card and a live system-status card on the right.
    """
    # Page config
    st.set_page_config(
        page_title="Academic Calendar Assistant",
        page_icon="📅",
        layout="wide",
        initial_sidebar_state="collapsed"
    )

    # Load custom CSS
    load_custom_css()

    # Initialize session states
    if 'pipeline' not in st.session_state:
        st.session_state.pipeline = None
    
    if 'chat_history' not in st.session_state:
        st.session_state.chat_history = []
        
    if 'context_memory' not in st.session_state:
        st.session_state.context_memory = []
        
    if 'processing_query' not in st.session_state:
        st.session_state.processing_query = False

    # Initialize pipeline with enhanced configuration
    # (initialize_pipeline returns None on failure; the status card below
    # reflects that as "offline".)
    if st.session_state.pipeline is None:
        with st.spinner("กำลังเริ่มต้นระบบ..."):
            st.session_state.pipeline = initialize_pipeline()

    # Header
    st.markdown("""
        <div style="text-align: center; padding: 2rem 0;">
            <h1>🎓 ผู้ช่วยค้นหาข้อมูลหลักสูตรและปฏิทินการศึกษา</h1>
            <p style="font-size: 1.2rem; color: #666;">บัณฑิตวิทยาลัย มหาวิทยาลัยศรีนครินทรวิโรฒ</p>
        </div>
    """, unsafe_allow_html=True)

    chat_col, info_col = st.columns([7, 3])
    
    # Chat column: transcript first, then the input form beneath it.
    with chat_col:
        display_chat_history()
        create_chat_input()

    # Info column
    with info_col:
        st.markdown("""
            <div style="background-color: #F9FAFB; padding: 1.5rem; border-radius: 12px; margin-bottom: 2rem;">
                <h3 style="color: #1E3A8A;">ℹ️ เกี่ยวกับระบบ</h3>
                <p style="color: #000000;">
                    ระบบนี้ใช้เทคโนโลยี <strong>RAG (Retrieval-Augmented Generation)</strong> 
                    ในการค้นหาและตอบคำถามเกี่ยวกับหลักสูตรและปฏิทินการศึกษา
                </p>
                <h4 style="color: #1E3A8A; margin-top: 1rem;">สามารถสอบถามข้อมูลเกี่ยวกับ:</h4>
                <ul style="list-style-type: none; padding-left: 0;">
                    <li style="color: #000000; margin-bottom: 0.5rem;">📚 รายวิชาในหลักสูตร</li>
                    <li style="color: #000000; margin-bottom: 0.5rem;">📝 การลงทะเบียนเรียน</li>
                    <li style="color: #000000; margin-bottom: 0.5rem;">📅 กำหนดการต่างๆ</li>
                    <li style="color: #000000; margin-bottom: 0.5rem;">💰 ค่าธรรมเนียมการศึกษา</li>
                    <li style="color: #000000; margin-bottom: 0.5rem;">📋 ขั้นตอนการสมัคร</li>
                </ul>
            </div>
        """, unsafe_allow_html=True)
        
        # Status card: clock uses the app's +5h offset convention
        # (presumably a local-time adjustment -- TODO confirm), and the
        # badge reflects whether the pipeline initialized successfully.
        st.markdown("""
            <div style="background-color: #f9fafb; padding: 1.5rem; border-radius: 12px;">
                <h3 style="color: #1E3A8A;">🔄 สถานะระบบ</h3>
                <div style="margin-top: 1rem;">
                    <p><strong style="color: #000000;">⏰ เวลาปัจจุบัน:</strong><br>
                    <span style="color: #000000;">{}</span></p>
                    <p><strong style="color: #000000;">📡 สถานะระบบ:</strong><br>
                    <span class="status-indicator {}">
                        {} {}
                    </span></p>
                </div>
            </div>
        """.format(
            (datetime.now() + timedelta(hours=5)).strftime('%Y-%m-%d %H:%M:%S'),
            "status-online" if st.session_state.pipeline else "status-offline",
            "🟢" if st.session_state.pipeline else "🔴",
            "พร้อมใช้งาน" if st.session_state.pipeline else "ไม่พร้อมใช้งาน"
        ), unsafe_allow_html=True)

if __name__ == "__main__":
    main()