ssreeramj committed
Commit 154a307 · 1 Parent(s): 4f2cf9a

added chat UI

Files changed (3)
  1. .github/workflows/sync-to-hub.yml +0 -0
  2. .gitignore +4 -1
  3. app.py +78 -0
.github/workflows/sync-to-hub.yml ADDED
Empty file added (no content)
.gitignore CHANGED
@@ -1,4 +1,7 @@
 .ipynb_checkpoints
 __pycache__
 notebooks
-base-20230418_1930.txt
+base-20230418_1930.txt
+
+.env
+env
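
The new .env entry keeps the OpenAI credentials out of version control; app.py (added below) reads them at startup with python-dotenv. As a minimal local-setup sketch, assuming the key is stored in a git-ignored .env file as a line of the form OPENAI_API_KEY=<your key> (the variable name comes from app.py; the check script itself is illustrative and not part of this commit):

import os

from dotenv import load_dotenv

load_dotenv()  # reads key=value pairs from a local .env file into the process environment

if not os.getenv("OPENAI_API_KEY"):
    raise SystemExit("OPENAI_API_KEY not found; add it to .env before launching app.py")
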
app.py ADDED
@@ -0,0 +1,78 @@
+import os
+from dotenv import load_dotenv
+
+from langchain.embeddings.openai import OpenAIEmbeddings
+from langchain.vectorstores import FAISS
+from langchain.llms import OpenAI
+from langchain.chat_models import ChatOpenAI
+from langchain.chains.question_answering import load_qa_chain
+
+import gradio as gr
+import time
+
+load_dotenv()  # take environment variables from .env.
+
+OPENAI_API_KEY = os.getenv("OPENAI_API_KEY")
+
+# load the embedding model and the saved FAISS index
+embeddings = OpenAIEmbeddings(openai_api_key=OPENAI_API_KEY)
+
+docsearch = FAISS.load_local("base-20230418_1930-index", embeddings)
+llm = ChatOpenAI(openai_api_key=OPENAI_API_KEY, temperature=0.2, max_tokens=1024)
+
+chain = load_qa_chain(llm, chain_type="map_rerank", verbose=False)
+
+# Chatbot UI
+with gr.Blocks() as demo:
+    gr.Markdown("## Tiger Analytics Town Hall Q1 2023!!")
+    chatbot = gr.Chatbot(label="Tiger Bot").style(height=400)
+
+    with gr.Row():
+        with gr.Column(scale=0.90):
+            msg = gr.Textbox(
+                show_label=False,
+                placeholder="What do you want to know about the town hall?",
+            ).style(container=False)
+        with gr.Column(scale=0.10, min_width=0):
+            btn = gr.Button("Send")
+
+    clear = gr.Button("Clear")
+
+    def user(user_message, history):
+        return "", history + [[user_message, None]]
+
+    def bot(history):
+        # get the user query
+        query = history[-1][0]
+
+        # get relevant documents through similarity search
+        relevant_docs = docsearch.similarity_search(query=query, k=4)
+
+        # pass the relevant docs to the chat model to generate the final answer.
+        bot_message = chain(
+            {"input_documents": relevant_docs, "question": query},
+            return_only_outputs=True,
+        )["output_text"].strip()
+
+        history[-1][1] = bot_message
+        time.sleep(1)
+        return history
+
+    msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+        bot, chatbot, chatbot
+    )
+    btn.click(user, [msg, chatbot], [msg, chatbot], queue=False).then(
+        bot, chatbot, chatbot
+    )
+    clear.click(lambda: None, None, chatbot, queue=False)
+
+    gr.Markdown("## Some Example Questions")
+    gr.Examples(
+        [
+            "What are some new companies that got involved with us?",
+            "What were the disadvantages of working remotely?",
+        ],
+        [msg],
+    )
+
+demo.launch()
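
app.py loads a prebuilt FAISS index from base-20230418_1930-index, but the code that builds that index is not part of this commit. A rough sketch of how it could have been produced with the same libraries, assuming the source transcript is the git-ignored base-20230418_1930.txt and using illustrative chunking parameters:

import os

from dotenv import load_dotenv
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import FAISS

load_dotenv()

# read the raw town-hall transcript (assumed filename; matches the .gitignore entry above)
with open("base-20230418_1930.txt") as f:
    raw_text = f.read()

# split the transcript into overlapping chunks so each piece fits comfortably in a prompt
splitter = CharacterTextSplitter(separator="\n", chunk_size=1000, chunk_overlap=200)
chunks = splitter.split_text(raw_text)

# embed the chunks and persist the FAISS index that app.py loads with FAISS.load_local
embeddings = OpenAIEmbeddings(openai_api_key=os.getenv("OPENAI_API_KEY"))
index = FAISS.from_texts(chunks, embeddings)
index.save_local("base-20230418_1930-index")

At query time, the map_rerank chain in app.py has the model answer from each retrieved chunk separately, scores those answers, and returns the highest-ranked one, so the similarity search only needs a small k.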