Spaces:
Sleeping
Sleeping
Commit
·
6814430
1
Parent(s):
571b70a
Adding status to document loading
Browse files
app.py
CHANGED
@@ -12,7 +12,7 @@ DEVICE = 'cpu '
|
|
12 |
FILE_EXT = ['pdf','text','csv','word','wav']
|
13 |
|
14 |
|
15 |
-
def
|
16 |
return "Loading..."
|
17 |
|
18 |
|
@@ -36,9 +36,15 @@ def get_hugging_face_model(model_id,API_key,temperature=0.1):
|
|
36 |
model_kwargs={"temperature": temperature, "max_new_tokens": 2048})
|
37 |
return chat_llm
|
38 |
|
39 |
-
def
|
40 |
-
|
|
|
|
|
|
|
|
|
41 |
|
|
|
|
|
42 |
document = None
|
43 |
if doc_type == 'pdf':
|
44 |
document = process_pdf_document(document_file_name=file_data)
|
@@ -49,12 +55,16 @@ def chat_api(file_data,doc_type='pdf',key=None,llm_model='HuggingFace'):
|
|
49 |
elif doc_type == 'word':
|
50 |
document = process_word_document(document_file_name=file_data)
|
51 |
|
52 |
-
|
53 |
-
|
54 |
-
|
55 |
-
|
56 |
else:
|
57 |
-
|
|
|
|
|
|
|
|
|
58 |
|
59 |
|
60 |
|
@@ -106,19 +116,20 @@ with gr.Blocks(css=css) as demo:
|
|
106 |
|
107 |
with gr.Column():
|
108 |
with gr.Box():
|
109 |
-
LLM_option = gr.Dropdown(['HuggingFace','OpenAI'],label='
|
110 |
-
API_key = gr.Textbox(label="
|
111 |
with gr.Column():
|
112 |
-
|
113 |
-
|
|
|
114 |
with gr.Row():
|
115 |
-
langchain_status = gr.Textbox(label="Status", placeholder="", interactive=False)
|
116 |
load_pdf = gr.Button("Load file to langchain")
|
|
|
117 |
|
118 |
chatbot = gr.Chatbot()
|
119 |
question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter")
|
120 |
submit_button = gr.Button("Send Message")
|
121 |
-
load_pdf.click(
|
122 |
load_pdf.click(chat_api, inputs=[pdf_doc,file_extension,API_key,LLM_option], outputs=[langchain_status], queue=False)
|
123 |
# question.submit(add_text, [chatbot, question], [chatbot, question]).then(
|
124 |
# bot, chatbot, chatbot
|
|
|
12 |
FILE_EXT = ['pdf','text','csv','word','wav']
|
13 |
|
14 |
|
15 |
+
def loading_file():
    """Status string displayed in the UI while a document upload is processed."""
    status_message = "Loading..."
    return status_message
|
17 |
|
18 |
|
|
|
36 |
model_kwargs={"temperature": temperature, "max_new_tokens": 2048})
|
37 |
return chat_llm
|
38 |
|
39 |
+
def chat_application(llm_model, key):
    """Build and return the chat LLM client for the selected provider.

    Parameters
    ----------
    llm_model : str
        'HuggingFace' selects the hosted Falcon-7B-Instruct model; any other
        value falls back to the OpenAI chat model.
    key : str
        API key for the chosen provider.

    Returns
    -------
    The constructed chat LLM client.
    """
    if llm_model == 'HuggingFace':
        llm = get_hugging_face_model(model_id='tiiuae/falcon-7b-instruct', API_key=key)
    else:
        # BUG FIX: the original assigned the client to `llm_model`, shadowing
        # the selector argument, and the function returned None on every path.
        llm = get_openai_chat_model(API_key=key)
    return llm
|
44 |
+
|
45 |
|
46 |
+
def document_loader(file_data,doc_type='pdf',key=None,llm_model='HuggingFace'):
|
47 |
+
embedding_model = SentenceTransformerEmbeddings(model_name='all-mpnet-base-v2',model_kwargs={"device": DEVICE})
|
48 |
document = None
|
49 |
if doc_type == 'pdf':
|
50 |
document = process_pdf_document(document_file_name=file_data)
|
|
|
55 |
elif doc_type == 'word':
|
56 |
document = process_word_document(document_file_name=file_data)
|
57 |
|
58 |
+
if document:
|
59 |
+
texts = process_documents(documents=document)
|
60 |
+
global vectordb
|
61 |
+
vectordb = FAISS.from_documents(documents=texts, embedding= embedding_model)
|
62 |
else:
|
63 |
+
return "Error in loading Documents "
|
64 |
+
|
65 |
+
return "Document loaded - Embeddings ready "
|
66 |
+
|
67 |
+
|
68 |
|
69 |
|
70 |
|
|
|
116 |
|
117 |
with gr.Column():
|
118 |
with gr.Box():
|
119 |
+
LLM_option = gr.Dropdown(['HuggingFace','OpenAI'],label='Large Language Model Selection',info='LLM Service')
|
120 |
+
API_key = gr.Textbox(label="Add {} API key".format(LLM_option), type="password")
|
121 |
with gr.Column():
|
122 |
+
with gr.row():
|
123 |
+
file_extension = gr.Dropdown(FILE_EXT, label="File Extensions", info="Select your files extensions!")
|
124 |
+
pdf_doc = gr.File(label="Upload File to start QA", file_types=FILE_EXT, type="file")
|
125 |
with gr.Row():
|
|
|
126 |
load_pdf = gr.Button("Load file to langchain")
|
127 |
+
langchain_status = gr.Textbox(label="Status", placeholder="", interactive=True)
|
128 |
|
129 |
chatbot = gr.Chatbot()
|
130 |
question = gr.Textbox(label="Question", placeholder="Type your question and hit Enter")
|
131 |
submit_button = gr.Button("Send Message")
|
132 |
+
load_pdf.click(loading_file, None, langchain_status, queue=False)
|
133 |
load_pdf.click(chat_api, inputs=[pdf_doc,file_extension,API_key,LLM_option], outputs=[langchain_status], queue=False)
|
134 |
# question.submit(add_text, [chatbot, question], [chatbot, question]).then(
|
135 |
# bot, chatbot, chatbot
|