import os
import requests
import streamlit as st
from langchain.chains import SequentialChain, LLMChain
from langchain.prompts import PromptTemplate
from langchain_groq import ChatGroq
from langchain_community.document_loaders import PDFPlumberLoader
from langchain_experimental.text_splitter import SemanticChunker
from langchain_huggingface import HuggingFaceEmbeddings
from langchain_chroma import Chroma
# Set API Keys
os.environ["GROQ_API_KEY"] = st.secrets.get("GROQ_API_KEY", "")
# Load LLM models
llm_judge = ChatGroq(model="deepseek-r1-distill-llama-70b")
rag_llm = ChatGroq(model="mixtral-8x7b-32768")
st.title("❓")
# Options for PDF input
pdf_source = st.radio("How would you like to provide a PDF?", ["Upload a PDF", "Enter a PDF URL"], index=0)
pdf_path = None  # Ensure pdf_path is always defined, even if no file is provided

if pdf_source == "Upload a PDF":
    uploaded_file = st.file_uploader("Upload your PDF file", type="pdf")
    if uploaded_file:
        with open("temp.pdf", "wb") as f:
            f.write(uploaded_file.getbuffer())
        pdf_path = "temp.pdf"
elif pdf_source == "Enter a PDF URL":
    pdf_url = st.text_input("Enter PDF URL:")
    if pdf_url:
        try:
            response = requests.get(pdf_url)
            if response.status_code == 200:
                with open("temp.pdf", "wb") as f:
                    f.write(response.content)
                pdf_path = "temp.pdf"
                st.success("✅ PDF Downloaded Successfully!")
            else:
                st.error("❌ Failed to download PDF. Check the URL.")
                pdf_path = None
        except Exception as e:
            st.error(f"Error downloading PDF: {e}")
            pdf_path = None
    else:
        pdf_path = None
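
# --- Hedged sketch (not part of the original commit) ---
# The imports above (PDFPlumberLoader, SemanticChunker, HuggingFaceEmbeddings,
# Chroma) suggest the downloaded PDF is meant to be indexed for retrieval.
# Below is a minimal sketch of that ingestion step, assuming the
# "sentence-transformers/all-MiniLM-L6-v2" embedding model and an in-memory
# Chroma collection; the variable names here are illustrative, not taken
# from the original file.
if pdf_path:
    # Load the PDF into LangChain Document objects
    docs = PDFPlumberLoader(pdf_path).load()

    # Split semantically (by embedding similarity) rather than by fixed character counts
    embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
    chunks = SemanticChunker(embeddings).split_documents(docs)

    # Index the chunks in Chroma and expose a retriever for the downstream RAG chain
    vector_store = Chroma.from_documents(chunks, embedding=embeddings)
    retriever = vector_store.as_retriever(search_kwargs={"k": 4})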