Upload 6 files

- .gitattributes +1 -0
- .streamlit/config.toml +9 -0
- app.py +142 -0
- requirements.txt +2 -0
- static/ai.jpg +3 -0
- static/human.png +0 -0
- static/styles.css +91 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+static/ai.jpg filter=lfs diff=lfs merge=lfs -text
.streamlit/config.toml
ADDED
@@ -0,0 +1,9 @@
+[server]
+enableStaticServing = true
+
+[theme]
+primaryColor="#112C55"
+backgroundColor="#ffffff"
+secondaryBackgroundColor="#cacaca"
+textColor="#16232e"
+font="serif"
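Note: with enableStaticServing = true, Streamlit serves the files placed in the repository's static/ directory under the app/static/ URL path of the running app. That is why the HTML rendered by app.py references the avatars as ./app/static/ai.jpg and ./app/static/human.png, while styles.css is read from ./static/styles.css directly on disk.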
app.py
ADDED
@@ -0,0 +1,142 @@
+import time
+import requests
+import streamlit as st
+
+st.set_page_config(page_title="ViBidLawQA - Trợ lý AI hỗ trợ hỏi đáp luật Việt Nam", page_icon="./app/static/ai.jpg", layout="centered", initial_sidebar_state="expanded")
+
+with open("./static/styles.css") as f:
+    st.markdown(f"<style>{f.read()}</style>", unsafe_allow_html=True)
+
+if 'messages' not in st.session_state:
+    st.session_state.messages = []
+
+st.markdown(f"""
+    <div class=logo_area>
+        <img src="./app/static/ai.jpg"/>
+    </div>
+""", unsafe_allow_html=True)
+st.markdown("<h2 style='text-align: center;'>ViBidLQA Bot</h2>", unsafe_allow_html=True)
+
+url_api_extraction_model = st.sidebar.text_input(label="URL API Extraction model:")
+url_api_generation_model = st.sidebar.text_input(label="URL API Generation model:")
+
+answering_method = st.sidebar.selectbox(options=['Extraction', 'Generation'], label='Chọn mô hình trả lời câu hỏi:', index=0)
+context = st.sidebar.text_area(label='Nội dung văn bản pháp luật Việt Nam:', placeholder='Vui lòng nhập nội dung văn bản pháp luật Việt Nam tại đây...', height=500)
+
+if answering_method == 'Generation':
+    print('Switching to generative model...')
+    print('Loading generative model...')
+
+if answering_method == 'Extraction':
+    print('Switching to extraction model...')
+    print('Loading extraction model...')
+
+def get_abstractive_answer(context, question):
+    data = {
+        "context": context,
+        "question": question
+    }
+
+    response = requests.post(url_api_generation_model, json=data)
+    if response.status_code == 200:
+        result = response.json()
+        return result["answer"]
+    else:
+        return f"Lỗi: {response.status_code} - {response.text}"
+
+def generate_text_effect(answer):
+    words = answer.split()
+    for i in range(len(words)):
+        time.sleep(0.03)
+        yield " ".join(words[:i+1])
+
+def get_extractive_answer(context, question, stride=20, max_length=256, n_best=50, max_answer_length=512):
+    data = {
+        "context": context,
+        "question": question,
+        "stride": stride,
+        "max_length": max_length,
+        "n_best": n_best,
+        "max_answer_length": max_answer_length
+    }
+
+    response = requests.post(url_api_extraction_model, json=data)
+
+    if response.status_code == 200:
+        result = response.json()
+        return result["best_answer"]
+    else:
+        return f"Lỗi: {response.status_code} - {response.text}"
+
+for message in st.session_state.messages:
+    if message['role'] == 'assistant':
+        avatar_class = "assistant-avatar"
+        message_class = "assistant-message"
+        avatar = './app/static/ai.jpg'
+    else:
+        avatar_class = "user-avatar"
+        message_class = "user-message"
+        avatar = './app/static/human.png'
+    st.markdown(f"""
+        <div class="{message_class}">
+            <img src="{avatar}" class="{avatar_class}" />
+            <div class="stMarkdown">{message['content']}</div>
+        </div>
+    """, unsafe_allow_html=True)
+
+if prompt := st.chat_input(placeholder='Tôi có thể giúp được gì cho bạn?'):
+    st.markdown(f"""
+        <div class="user-message">
+            <img src="./app/static/human.png" class="user-avatar" />
+            <div class="stMarkdown">{prompt}</div>
+        </div>
+    """, unsafe_allow_html=True)
+    st.session_state.messages.append({'role': 'user', 'content': prompt})
+
+    message_placeholder = st.empty()
+
+    for _ in range(2):
+        for dots in ["●", "●●", "●●●"]:
+            time.sleep(0.2)
+            message_placeholder.markdown(f"""
+                <div class="assistant-message">
+                    <img src="./app/static/ai.jpg" class="assistant-avatar" />
+                    <div class="stMarkdown">{dots}</div>
+                </div>
+            """, unsafe_allow_html=True)
+
+    full_response = ""
+    if answering_method == 'Generation':
+        abs_answer = get_abstractive_answer(context=context, question=prompt)
+        for word in generate_text_effect(abs_answer):
+            full_response = word
+
+            message_placeholder.markdown(f"""
+                <div class="assistant-message">
+                    <img src="./app/static/ai.jpg" class="assistant-avatar" />
+                    <div class="stMarkdown">{full_response}●</div>
+                </div>
+            """, unsafe_allow_html=True)
+
+    else:
+        ext_answer = get_extractive_answer(context=context, question=prompt)
+        for word in generate_text_effect(ext_answer):
+            full_response = word
+
+            message_placeholder.markdown(f"""
+                <div class="assistant-message">
+                    <img src="./app/static/ai.jpg" class="assistant-avatar" />
+                    <div class="stMarkdown">{full_response}●</div>
+                </div>
+            """, unsafe_allow_html=True)
+
+    message_placeholder.markdown(f"""
+        <div class="assistant-message">
+            <img src="./app/static/ai.jpg" class="assistant-avatar" />
+            <div class="stMarkdown">
+                {full_response}
+            </div>
+        </div>
+    """, unsafe_allow_html=True)
+
+    st.session_state.messages.append({'role': 'assistant', 'content': full_response})
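For reference, app.py assumes only that each sidebar URL accepts a JSON POST with the fields built in get_abstractive_answer / get_extractive_answer and returns JSON containing an "answer" key (generation) or a "best_answer" key (extraction). A minimal sketch of a backend honoring that contract, assuming FastAPI and hypothetical /generate and /extract paths (none of which are part of this upload, and with the model calls stubbed out):

# Hypothetical backend sketch of the JSON contract app.py expects.
# FastAPI and the /generate, /extract paths are assumptions; the model
# logic is stubbed with placeholders, not part of this repository.
from fastapi import FastAPI
from pydantic import BaseModel

api = FastAPI()

class GenerationRequest(BaseModel):
    context: str
    question: str

class ExtractionRequest(BaseModel):
    context: str
    question: str
    stride: int = 20
    max_length: int = 256
    n_best: int = 50
    max_answer_length: int = 512

@api.post("/generate")  # URL to paste into "URL API Generation model:"
def generate(req: GenerationRequest):
    # Placeholder: a real service would run the generative QA model here.
    answer = f"(generated answer for: {req.question})"
    return {"answer": answer}

@api.post("/extract")  # URL to paste into "URL API Extraction model:"
def extract(req: ExtractionRequest):
    # Placeholder: a real service would run the extractive QA model using
    # req.stride, req.max_length, req.n_best and req.max_answer_length.
    best_answer = req.context[:req.max_answer_length]
    return {"best_answer": best_answer}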
requirements.txt
ADDED
@@ -0,0 +1,2 @@
+requests==2.32.3
+streamlit==1.40.1
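To try the UI locally (an assumed workflow, not spelled out in this upload): install the pinned dependencies with pip install -r requirements.txt, start the app with streamlit run app.py, and paste the extraction and generation endpoint URLs into the sidebar fields.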
static/ai.jpg
ADDED
Binary image file (tracked with Git LFS).
static/human.png
ADDED
Binary image file.
static/styles.css
ADDED
@@ -0,0 +1,91 @@
+.user-message {
+    display: flex;
+    justify-content: flex-end;
+    align-items: center;
+    margin: 10px 0;
+    color: white;
+}
+
+.user-message .stMarkdown {
+    background: linear-gradient(0.9turn, #d46c4e, #ca8d57);
+    /* background: linear-gradient(0.9turn, #112c55, #3578cf); */
+    border-radius: 15px;
+    padding: 10px;
+    max-width: 80%;
+    word-wrap: break-word;
+}
+
+.user-avatar {
+    order: 2;
+    margin-left: 10px;
+    width: 38px;
+    height: 38px;
+    border-radius: 50%;
+    align-self: flex-start;
+}
+
+.assistant-message {
+    display: flex;
+    align-items: center;
+    margin: 10px 0;
+    color: black;
+}
+
+.assistant-message .stMarkdown {
+    background-color: #dadada;
+    border-radius: 15px;
+    padding: 10px;
+    max-width: 80%;
+    word-wrap: break-word;
+}
+
+.assistant-options {
+    display: flex;
+    align-items: center;
+    gap: 12px;
+    margin-top: 8px;
+    color: #666;
+}
+
+.assistant-options i {
+    cursor: pointer;
+}
+
+.assistant-options i:hover {
+    color: #333;
+}
+
+.assistant-avatar {
+    margin-right: 10px;
+    width: 38px;
+    height: 38px;
+    border-radius: 50%;
+    align-self: flex-start;
+}
+
+.logo_area {
+    display: flex;
+    justify-content: center;
+}
+
+.logo_area img {
+    margin-top: 50px;
+    margin-left: 0px auto;
+    width: 149px;
+}
+
+/* [data-testid="baseButton-headerNoPadding"], */
+[data-testid="stDecoration"],
+/* [data-testid="baseButton-header"], */
+[data-testid="stHeader"] {
+    display: none;
+}
+
+[data-testid="stChatInput"],
+[data-testid="stChatInputSubmitButton"] {
+    border-radius: 15px;
+}
+
+[data-baseweb="textarea"] {
+    border-color: transparent;
+}