File size: 2,901 Bytes
3ad39d0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import chainlit as cl
from llama_index.llms import MonsterLLM
from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext

def indexing(llm, path):
    """Load the document at *path*, build a vector index over it, and
    return a query engine for that index.

    Parameters:
        llm: LLM instance used by the service context to answer queries.
        path: filesystem path of the single file to index.

    Returns:
        A llama_index query engine over the indexed document.
    """
    docs = SimpleDirectoryReader(input_files=[path]).load_data()
    print("loading done")
    ctx = ServiceContext.from_defaults(
        chunk_size=1024,
        llm=llm,
        embed_model="local:BAAI/bge-small-en-v1.5",
    )
    print("indexing")
    vindex = VectorStoreIndex.from_documents(
        docs, service_context=ctx, use_async=True
    )
    engine = vindex.as_query_engine()
    print("all done")
    print(engine)
    return engine

def qa(sp, engine, message):
    """Prepend the system prompt *sp* to the user's message text and run
    the combined question through *engine*.

    Parameters:
        sp: system prompt string (may be empty).
        engine: query engine exposing a ``query(str)`` method.
        message: chat message object whose ``content`` holds the user text.

    Returns:
        The engine's response object.
    """
    question = f"{sp} {message.content}"
    return engine.query(question)

@cl.on_chat_start
async def factory():
    """Chat-session setup.

    Prompts for the MonsterAPI deployment URL and auth token, collects a
    PDF upload, indexes it off the event loop, and stores the resulting
    query engine (plus an optional user-supplied system prompt) in the
    Chainlit user session for use by the message handler.
    """
    url = await cl.AskUserMessage(author="Beast",content="Enter url").send()
    print(url)
    # Normalize the base URL by stripping any trailing slash(es).
    # The previous code only handled URLs ending exactly in ".ai/",
    # leaving every other host with a dangling slash.
    url['output'] = url['output'].rstrip("/")
    auth = await cl.AskUserMessage(author="Beast",content="Enter auth token").send()
    print(auth)
    model = 'deploy-llm'
    llm = MonsterLLM(model=model,base_url=url['output'],monster_api_key=auth['output'],temperature=0.75, context_window=1024)
    # Re-ask until the user actually uploads a file (send() returns None on timeout).
    files = None
    while files is None:
        files = await cl.AskFileMessage(author="Beast",
            content="Please upload a PDF file to begin!",
            accept=["application/pdf"],
            max_size_mb=20,
            timeout=180,
        ).send()

    pdf = files[0]
    print(pdf)
    msg = cl.Message(author="Beast",content=f"Processing `{pdf.name}`...")
    await msg.send()
    # indexing() is blocking; make_async runs it in a worker thread so the
    # event loop stays responsive.
    query_engine = await cl.make_async(indexing)(llm,pdf.path)
    msg.content = f"`{pdf.name}` processed."
    await msg.update()
    res = await cl.AskActionMessage(author="Beast",
        content="Do you want to enter system prompt?",
        actions=[
            cl.Action(name="yes", value="yes", label="βœ… Yes"),
            cl.Action(name="no", value="no", label="❌ No"),
        ],
    ).send()

    # "sp" is only stored when the user opts in; main() treats a missing
    # value as an empty prompt.
    if res and res.get("value") == "yes":
        sp = await cl.AskUserMessage(author="Beast",content="Enter system prompt").send()
        await cl.Message(author="Beast",content="Noted. Go ahead as your questions!!").send()
        cl.user_session.set("sp", sp["output"])
    else:
        await cl.Message(author="Beast",content="Okay, then you can start asking your questions!!").send()
    cl.user_session.set("engine", query_engine)
    
@cl.on_message
async def main(message: cl.Message):
    """Answer an incoming chat message with the session's query engine.

    Shows a placeholder message, runs the (blocking) query off the event
    loop via ``cl.make_async``, then edits the placeholder in place with
    the engine's response.
    """
    msg = cl.Message(author="Beast", content="Processing...", disable_feedback=False)
    await msg.send()
    engine = cl.user_session.get("engine")
    # The system prompt is only present when the user opted in during setup.
    sp = cl.user_session.get("sp")
    if sp is None:  # identity check, not `== None`
        sp = ""
    response = await cl.make_async(qa)(sp, engine, message)
    print(response)
    msg.content = str(response)
    await msg.update()