Soumen committed on
Commit 87566f8 · 1 Parent(s): 89e83ce

Update app.py

Files changed (1)
  1. app.py +46 -49
app.py CHANGED
@@ -91,45 +91,9 @@ def main():
  import streamlit as st
  if "photo" not in st.session_state:
  st.session_state["photo"]="not done"
- a, b = st.columns([1, 1])
  def change_photo_state():
  st.session_state["photo"]="done"
  with st.container():
- with a:
- #import torch
- from streamlit_option_menu import option_menu
- from streamlit_chat import message as st_message
- from transformers import BlenderbotTokenizer
- from transformers import BlenderbotForConditionalGeneration
- st.title("Simple Chatbot for fun!")
-
- @st.experimental_singleton
- def get_models():
- # it may be necessary for other frameworks to cache the model
- # seems pytorch keeps an internal state of the conversation
- model_name = "facebook/blenderbot-400M-distill"
- tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
- model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
- return tokenizer, model
- if "history" not in st.session_state:
- st.session_state.history = []
- st.title("Hello Chatbot")
- def main():
- st.text_input("Talk to the bot", key="input_text", on_change=generate_answer)
- def generate_answer():
- tokenizer, model = get_models()
- user_message = st.session_state.input_text
- inputs = tokenizer(st.session_state.input_text, return_tensors="pt")
- result = model.generate(**inputs)
- message_bot = tokenizer.decode(
- result[0], skip_special_tokens=True
- ) # .replace("<s>", "").replace("</s>", "")
- st.session_state.history.append({"message": user_message, "is_user": True})
- st.session_state.history.append({"message": message_bot, "is_user": False})
- from copyreg import clear_extension_cache
- for chat in st.session_state.history:
- st_message(**chat)
- with b:
  c2, c3 = st.columns([1,1])
  message = st.text_input("Type your text here!")
  camera_photo = c2.camera_input("Capture a photo to summarize: ", on_change=change_photo_state)
@@ -195,19 +159,52 @@ def main():
  if c8.button("Bangla"):
  bansum(text)
  if c9.button("English"):
- engsum(text)
- # if st.button("English Text Generation"):
- # def query(payload):
- # response = requests.post(API_URL2, headers=headers2, json=payload)
- # return response.json()
-
- # out = query({
- # "inputs": text,
- # })
- # if isinstance(out, list) and out[0].get("generated_text"):
- # text_output = out[0]["generated_text"]
- # st.success(text_output)
- # #text=text_output
+ engsum(text)
+
+ with st.container():
+ from streamlit_chat import message as st_message
+ from transformers import BlenderbotTokenizer
+ from transformers import BlenderbotForConditionalGeneration
+ st.title("Simple Chatbot for fun!")
+
+ @st.experimental_singleton
+ def get_models():
+ # it may be necessary for other frameworks to cache the model
+ # seems pytorch keeps an internal state of the conversation
+ model_name = "facebook/blenderbot-400M-distill"
+ tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
+ model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
+ return tokenizer, model
+ if "history" not in st.session_state:
+ st.session_state.history = []
+ st.title("Hello Chatbot")
+ def main():
+ st.text_input("Talk to the bot", key="input_text", on_change=generate_answer)
+ def generate_answer():
+ tokenizer, model = get_models()
+ user_message = st.session_state.input_text
+ inputs = tokenizer(st.session_state.input_text, return_tensors="pt")
+ result = model.generate(**inputs)
+ message_bot = tokenizer.decode(
+ result[0], skip_special_tokens=True
+ ) # .replace("<s>", "").replace("</s>", "")
+ st.session_state.history.append({"message": user_message, "is_user": True})
+ st.session_state.history.append({"message": message_bot, "is_user": False})
+ from copyreg import clear_extension_cache
+ for chat in st.session_state.history:
+ st_message(**chat)
+ # if st.button("English Text Generation"):
+ # def query(payload):
+ # response = requests.post(API_URL2, headers=headers2, json=payload)
+ # return response.json()
+
+ # out = query({
+ # "inputs": text,
+ # })
+ # if isinstance(out, list) and out[0].get("generated_text"):
+ # text_output = out[0]["generated_text"]
+ # st.success(text_output)
+ # #text=text_output


  if __name__ == "__main__":
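
For reference, the chatbot block this commit relocates into its own st.container() relies on a standard Streamlit pattern: cache the Blenderbot model once per process and replay the chat history on every rerun. Below is a minimal standalone sketch of that pattern, not the committed file; it assumes streamlit, streamlit-chat, and transformers are installed, and it uses the same st.experimental_singleton decorator seen in the diff (superseded by st.cache_resource in newer Streamlit releases).

# Hedged sketch of the caching + chat-history pattern; names mirror the diff.
import streamlit as st
from streamlit_chat import message as st_message
from transformers import BlenderbotForConditionalGeneration, BlenderbotTokenizer

@st.experimental_singleton  # st.cache_resource on newer Streamlit versions
def get_models():
    # Load the tokenizer/model pair once; later reruns reuse the cached objects.
    model_name = "facebook/blenderbot-400M-distill"
    tokenizer = BlenderbotTokenizer.from_pretrained(model_name)
    model = BlenderbotForConditionalGeneration.from_pretrained(model_name)
    return tokenizer, model

if "history" not in st.session_state:
    st.session_state.history = []

def generate_answer():
    # Runs when the text input changes; records both sides of the exchange.
    tokenizer, model = get_models()
    user_message = st.session_state.input_text
    inputs = tokenizer(user_message, return_tensors="pt")
    result = model.generate(**inputs)
    reply = tokenizer.decode(result[0], skip_special_tokens=True)
    st.session_state.history.append({"message": user_message, "is_user": True})
    st.session_state.history.append({"message": reply, "is_user": False})

st.title("Simple Chatbot for fun!")
st.text_input("Talk to the bot", key="input_text", on_change=generate_answer)
for chat in st.session_state.history:
    st_message(**chat)  # streamlit_chat renders each entry as a chat bubble

The caching matters because Streamlit reruns the whole script on every interaction; without the singleton, the 400M-parameter Blenderbot model would be reloaded from disk on each message.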