import os

import gradio as gr
from langchain.llms import HuggingFaceHub
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.text_splitter import CharacterTextSplitter
from langchain.vectorstores import Chroma
from langchain.chains import RetrievalQA

# Set your Hugging Face API token (use your own token; never hard-code a real one in shared code).
os.environ['HUGGINGFACEHUB_API_TOKEN'] = 'YOUR_HUGGINGFACEHUB_API_TOKEN'

# LLM used to generate answers.
llm = HuggingFaceHub(repo_id="google/flan-t5-xxl")

# Load the source document and split it into chunks for embedding.
with open('us.txt') as f:
    Biden = f.read()
text_splitter = CharacterTextSplitter(chunk_size=1000, chunk_overlap=0)
texts = text_splitter.split_text(Biden)

# Embed the chunks and index them in a Chroma vector store.
embeddings = HuggingFaceEmbeddings()
db = Chroma.from_texts(texts, embeddings)
retriever = db.as_retriever()

# Retrieval-augmented QA chain: retrieve relevant chunks and "stuff" them into the prompt.
qa = RetrievalQA.from_chain_type(llm=llm, chain_type="stuff", retriever=retriever)

def answer(query):
    return qa.run(query)

demo = gr.Interface(
    fn=answer,
    inputs='text',
    outputs='text',
    examples=[['What did the president say about Ketanji Brown Jackson']],
)
demo.launch()