Spaces:
Runtime error
Runtime error
File size: 3,833 Bytes
3ad39d0 2ce9cbb 3ad39d0 e6877b0 3ad39d0 2ce9cbb 3ad39d0 2ce9cbb 3ad39d0 2ce9cbb 3ad39d0 e6877b0 3ad39d0 2ce9cbb 3ad39d0 2ce9cbb e6877b0 2ce9cbb e6877b0 2ce9cbb e6877b0 2ce9cbb 3ad39d0 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 |
import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
def indexing(llm, path=None):
    """Build a vector index over the file at *path* and return its query engine.

    Args:
        llm: The LLM instance used by the service context for answering queries.
        path: File to index; defaults to "data.txt" when not provided.

    Returns:
        The llama_index query engine built from the document.

    Side effect: the engine is also stored in the Chainlit user session
    under the key "engine" so `main` can reuse it for follow-up messages.
    """
    if path is None:  # was `path==None`; identity check is the Python idiom
        path = "data.txt"
    documents = SimpleDirectoryReader(input_files=[path]).load_data()
    print("loading done")
    # Local embedding model keeps embedding off the remote LLM endpoint.
    service_context = ServiceContext.from_defaults(
        chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
    )
    print("indexing")
    index = VectorStoreIndex.from_documents(
        documents, service_context=service_context, use_async=True
    )
    query_engine = index.as_query_engine()
    print("all done")
    print(query_engine)
    cl.user_session.set("engine", query_engine)
    return query_engine
def qa(sp, engine, message):
    """Prefix the user's message with the system prompt and query the engine.

    Args:
        sp: System prompt text (may be an empty string).
        engine: Query engine exposing a `.query(str)` method.
        message: Chat message object whose `.content` holds the user text.

    Returns:
        Whatever the engine's `.query` call returns.
    """
    question = sp + " " + message.content
    return engine.query(question)
@cl.on_chat_start
async def factory():
    """Chat-session setup: collect endpoint URL and auth token, build the LLM,
    index the default document, and optionally capture a system prompt.

    Stores "llm" (and, via `indexing`, "engine") in the Chainlit user session.
    """
    url = await cl.AskUserMessage(author="Beast", content="Enter url").send()
    print(url)
    # Normalize the base URL: the old code only handled URLs ending in ".ai/",
    # leaving any other trailing-slash URL broken. Strip all trailing slashes.
    if url['output'].endswith("/"):
        url['output'] = url['output'].rstrip("/")
    auth = await cl.AskUserMessage(author="Beast", content="Enter auth token").send()
    print(auth)
    model = 'deploy-llm'
    llm = MonsterLLM(
        model=model,
        base_url=url['output'],
        monster_api_key=auth['output'],
        temperature=0.75,
        context_window=1024,
    )
    cl.user_session.set("llm", llm)
    # Labels were mojibake ("β" + embedded newline) — presumably broken
    # check/cross emoji from a bad encoding; repaired to readable labels.
    res = await cl.AskActionMessage(
        author="Beast",
        content="Do you want to enter system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="✅ Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()
    # Index the bundled default document off the event loop.
    query_engine = await cl.make_async(indexing)(llm)
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast", content="Enter system prompt").send()
        await cl.Message(author="Beast", content="Noted. Go ahead as your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast", content="Okay, then you can start asking your questions!!").send()
@cl.on_message
async def main(message: cl.Message):
    """Handle an incoming chat message.

    Plain text is answered with the session's existing query engine. A message
    carrying PDF attachments is first indexed (building a fresh engine from the
    last attached PDF) and then answered; non-PDF attachments are rejected.
    """
    engine = cl.user_session.get("engine")
    llm = cl.user_session.get("llm")
    sp = cl.user_session.get("sp")
    if sp is None:  # no system prompt captured at chat start
        sp = ""
    if not message.elements:
        # No attachments: answer directly from the session engine.
        msg = cl.Message(author="Beast", content="Generating Response...", disable_feedback=False)
        await msg.send()
        response = await cl.make_async(qa)(sp, engine, message)
        print(response)
        msg.content = str(response)
        await msg.update()
        return
    # Attachments present: accept only PDFs (last PDF wins, as before).
    pdf = None
    for element in message.elements:
        if "pdf" not in element.mime:
            await cl.Message(author="Beast", content="We only support PDF for now").send()
            return
        pdf = element
    msg = cl.Message(author="Beast", content=f"Processing `{pdf.name}`...")
    await msg.send()
    query_engine = await cl.make_async(indexing)(llm, pdf.path)
    msg.content = f"`{pdf.name}` processed."
    await msg.update()
    msg = cl.Message(author="Beast", content="Generating Response...", disable_feedback=False)
    await msg.send()
    response = await cl.make_async(qa)(sp, query_engine, message)
    print(response)
    msg.content = str(response)
    await msg.update()
|