Update app.py
app.py CHANGED
@@ -2,23 +2,13 @@ import os
 import streamlit as st
 import chatbot as demo_chat
 from transformers import AutoModelForCausalLM, AutoTokenizer
-from langchain.schema import (
-    HumanMessage,
-    SystemMessage,
-)
-from langchain_community.chat_models.huggingface import ChatHuggingFace
-from transformers import pipeline
 
 st.title("Hi, I am Chatbot Philio :mermaid:")
 st.write("I am your hotel booking assistant for today.")
 
 tokenizer, model = demo_chat.load_model()
 
-model_identifier = "KvrParaskevi/Hotel-Assistant-
-task = "text-generation" # Change this to your model's task
-
-# Load the model using the pipeline
-model_pipeline = pipeline(task, model=model, tokenizer=tokenizer)
+model_identifier = "KvrParaskevi/Hotel-Assistant-Attempt5-Llama-2-7b"
 
 scrollable_div_style = """
 <style>
@@ -39,6 +29,13 @@ def render_chat_history(chat_history):
         with st.chat_message(message["role"]):
             st.markdown(message["content"])
 
+def generate_response(chat_history):
+    tokenized_chat = tokenizer.apply_chat_template(chat_history, tokenize=True, add_generation_prompt=True, return_tensors="pt")
+    outputs = model.generate(tokenized_chat, do_sample=True, max_new_tokens=50, temperature=0.5, top_p=0.8)
+    answer = tokenizer.decode(outputs[0][tokenized_chat.shape[1]:], skip_special_tokens=True)
+    final_answer = answer.split("<")[0]
+    return final_answer
+
 #Application
 #Langchain memory in session cache
 if 'memory' not in st.session_state:
@@ -58,12 +55,10 @@ if 'chat_history' not in st.session_state:
 if 'model' not in st.session_state:
     st.session_state.model = model
 
-st.markdown('<div class="scrollable-div">', unsafe_allow_html=True)
+st.markdown('<div class="scrollable-div">', unsafe_allow_html=True) #add css style to container
 render_chat_history(st.session_state.chat_history)
 
-#
-#input_text = st.chat_input(placeholder="Here you can chat with our hotel booking model.")
-
+#Input field for chat interface
 if input_text := st.chat_input(placeholder="Here you can chat with our hotel booking model."):
 
     with st.chat_message("user"):
@@ -71,12 +66,7 @@ if input_text := st.chat_input(placeholder="Here you can chat with our hotel boo
     st.session_state.chat_history.append({"role" : "user", "content" : input_text}) #append message to chat history
 
     with st.spinner("Generating response..."):
-
-        #first_answer = chat_response.split("Human")[0] #Because of Predict it prints the whole conversation. Here we separate the first answer only.
-        tokenized_chat = tokenizer.apply_chat_template(st.session_state.chat_history, tokenize=True, add_generation_prompt=True, return_tensors="pt")
-        #st.write(tokenizer.decode(tokenized_chat[0]))
-        outputs = model.generate(tokenized_chat, max_new_tokens=128)
-        first_answer = tokenizer.decode(outputs[0][tokenized_chat.shape[1]:], skip_special_tokens=True)
+        first_answer = generate_response(st.session_state.chat_history)
 
     with st.chat_message("assistant"):
         st.markdown(first_answer)
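
Note: the commit collects the generation steps into a reusable generate_response() helper. Below is a minimal standalone sketch of the same recipe outside Streamlit; the repo id is the one named in the diff, but loading via from_pretrained is an assumption here, since in the Space the model and tokenizer actually come from demo_chat.load_model().

from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "KvrParaskevi/Hotel-Assistant-Attempt5-Llama-2-7b"  # repo id from the diff
tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForCausalLM.from_pretrained(model_id)

chat_history = [{"role": "user", "content": "Do you have a double room free tonight?"}]

# 1) Render the chat history with the model's chat template.
inputs = tokenizer.apply_chat_template(chat_history, tokenize=True, add_generation_prompt=True, return_tensors="pt")
# 2) Sample a short completion.
outputs = model.generate(inputs, do_sample=True, max_new_tokens=50, temperature=0.5, top_p=0.8)
# 3) outputs[0] still contains the prompt tokens, so slice them off at
#    inputs.shape[1] and decode only the newly generated part.
answer = tokenizer.decode(outputs[0][inputs.shape[1]:], skip_special_tokens=True)
# 4) Trim at the first literal '<', as generate_response() does.
print(answer.split("<")[0])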
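Two behavioral changes ride along with the refactor: decoding now samples (do_sample=True, temperature=0.5, top_p=0.8) instead of the previous greedy generate call, and max_new_tokens drops from 128 to 50, presumably to keep replies short and cut latency on the Space. The extra answer.split("<")[0] trim appears to guard against tag-like text (e.g. "<human>:") that the model can emit verbatim; skip_special_tokens=True strips only registered special tokens, not literal text.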
|