Rehman1603 committed
Commit aaec2db · verified · 1 Parent(s): 0b7ebc8

Create app.py

Files changed (1)
  1. app.py +77 -0
app.py ADDED
@@ -0,0 +1,77 @@
import gradio as gr
import os
from langchain import PromptTemplate, LLMChain
from langchain_together import Together
import pdfplumber

# Together API key (in a deployed Space this should come from a secret, not source code)
os.environ['TOGETHER_API_KEY'] = "d88cb7414e4039a84d2ed63f1b47daaaa4230c4c53a422045d8a30a9a3bc87d8"

def extract_text_from_pdf(pdf_file, max_pages=16):
    """Extract text from the first `max_pages` pages of the uploaded PDF."""
    text = ""
    with pdfplumber.open(pdf_file) as pdf:
        for i, page in enumerate(pdf.pages):
            if i >= max_pages:
                break
            # extract_text() can return None for pages without a text layer
            text += (page.extract_text() or "") + "\n"
    return text

def Bot(text, question):
    chat_template = """
    Based on the provided context: {text}
    Please answer the following question: {Questions}
    Only provide answers that are directly related to the context. If the question is unrelated, respond with "I don't know".
    """
    prompt = PromptTemplate(
        input_variables=['text', 'Questions'],
        template=chat_template
    )
    llama3 = Together(model="meta-llama/Llama-3-70b-chat-hf", max_tokens=50)
    Generated_chat = LLMChain(llm=llama3, prompt=prompt)

    try:
        response = Generated_chat.invoke({
            "text": text,
            "Questions": question
        })

        response_text = response['text']
        response_text = response_text.replace("assistant", "")

        # Post-processing: drop any word that has already appeared (case-insensitive)
        # and make sure the answer ends with a period
        words = response_text.split()
        seen = set()
        filtered_words = [word for word in words if word.lower() not in seen and not seen.add(word.lower())]
        response_text = ' '.join(filtered_words)
        response_text = response_text.strip()  # ensure no extra spaces at the ends
        if not response_text.endswith('.'):
            response_text += '.'

        return response_text
    except Exception as e:
        return f"Error in generating response: {e}"

def ChatBot(history, document, question):
    greetings = ["hi", "hello", "hey", "greetings", "what's up", "howdy"]
    question_lower = question.lower().strip()
    if question_lower in greetings or any(question_lower.startswith(greeting) for greeting in greetings):
        # gr.Chatbot expects (user_message, bot_message) pairs
        return history + [(question, "Hello! How can I assist you with the document today?")]

    text = extract_text_from_pdf(document)
    response = Bot(text, question)
    history.append((question, response))
    return history

with gr.Blocks() as iface:
    chatbot = gr.Chatbot()
    document = gr.File(label="Upload PDF Document", type="filepath")
    question = gr.Textbox(label="Ask a Question", placeholder="Type your question here...")

    def respond(history, document, question):
        return ChatBot(history, document, question)

    question.submit(respond, [chatbot, document, question], chatbot)

iface.launch(debug=True)
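
For quick local testing of the same extract-then-ask flow without launching the Gradio UI, a minimal sketch is shown below. It reuses the same libraries and model as app.py, but assumes TOGETHER_API_KEY is already exported in the environment and that a local file named example.pdf exists; both the file name and the question are illustrative, not part of the committed code.

import pdfplumber
from langchain import PromptTemplate, LLMChain
from langchain_together import Together

# Pull text from the first page of a hypothetical local PDF
with pdfplumber.open("example.pdf") as pdf:
    context = pdf.pages[0].extract_text() or ""

prompt = PromptTemplate(
    input_variables=["text", "Questions"],
    template="Context: {text}\nQuestion: {Questions}\nAnswer only from the context.",
)
chain = LLMChain(llm=Together(model="meta-llama/Llama-3-70b-chat-hf", max_tokens=50), prompt=prompt)

# Prints the model's answer (or raises if the API key is missing/invalid)
print(chain.invoke({"text": context, "Questions": "What is this document about?"})["text"])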