Nitish-py committed
Commit 1d03ff3 · 1 Parent(s): 1fbcbcc

response without index

Files changed (1): app.py (+41 -30)
app.py CHANGED
@@ -1,20 +1,16 @@
 import chainlit as cl
 from llama_index.llms import MonsterLLM
 from llama_index import VectorStoreIndex,SimpleDirectoryReader, ServiceContext
+from monsterapi import client as mclient
+import json
 
-def indexing(llm,path=None):
-    if path==None:
-        path="data.txt"
+def indexing(llm,path):
     documents = SimpleDirectoryReader(input_files=[path]).load_data()
-    print("loading done")
     service_context = ServiceContext.from_defaults(
         chunk_size=1024, llm=llm, embed_model="local:BAAI/bge-small-en-v1.5"
     )
-    print("indexing")
     index = VectorStoreIndex.from_documents(documents, service_context=service_context, use_async=True)
     query_engine = index.as_query_engine()
-    print("all done")
-    print(query_engine)
     cl.user_session.set("engine", query_engine)
 
 def qa(sp,engine,message):
@@ -26,14 +22,15 @@ def qa(sp,engine,message):
 @cl.on_chat_start
 async def factory():
     url = await cl.AskUserMessage(author="Beast",content="Enter url").send()
-    print(url)
     index_ai = url['output'].find(".monsterapi.ai")
     url_ai = url['output'][:index_ai + len(".monsterapi.ai")]
     auth = await cl.AskUserMessage(author="Beast",content="Enter auth token").send()
-    print(auth)
     model = 'deploy-llm'
     llm = MonsterLLM(model=model,base_url=url_ai,monster_api_key=auth['output'],temperature=0.75, context_window=1024)
+    service_client = mclient(api_key = auth['output'], base_url = url_ai)
+    cl.user_session.set("service_client",service_client)
     cl.user_session.set("llm", llm)
+
     res = await cl.AskActionMessage(author="Beast",
         content="Do you want to enter system prompt?",
         actions=[
@@ -41,38 +38,27 @@ async def factory():
             cl.Action(name="no", value="no", label="❌ No"),
         ],
     ).send()
+
     if res and res.get("value") == "yes":
         sp = await cl.AskUserMessage(author="Beast",content="Enter system prompt").send()
-        msg=cl.Message(author="Beast",content="Initiaing LLM....")
+        msg=cl.Message(author="Beast",content="Noted. Go ahead as your questions!!")
         await msg.send()
-        await cl.make_async(indexing)(llm)
-        msg.content="Noted. Go ahead as your questions!!"
-        await msg.update()
         cl.user_session.set("sp", sp["output"])
+
     else:
-        msg=cl.Message(author="Beast",content="Initiaing LLM....")
-        await msg.send()
-        await cl.make_async(indexing)(llm)
-        msg.content="Okay, then you can start asking your questions!!"
-        await msg.update()
-
-
-
+        await cl.Message(author="Beast",content="Okay, then you can start asking your questions!!").send()
+
 @cl.on_message
 async def main(message: cl.Message):
+    service_client=cl.user_session.get("service_client")
     engine = cl.user_session.get("engine")
     llm=cl.user_session.get("llm")
     sp=cl.user_session.get("sp")
+
     if sp==None:
         sp=""
-    if not message.elements:
-        msg = cl.Message(author="Beast",content=f"Generating Response...", disable_feedback=False)
-        await msg.send()
-        response =await cl.make_async(qa)(sp,engine,message)
-        print(response)
-        msg.content = str(response)
-        await msg.update()
-    elif message.elements:
+
+    if message.elements:
         go=True
         for file in message.elements:
             if "pdf" in file.mime:
@@ -87,13 +73,38 @@ async def main(message: cl.Message):
             query_engine = await cl.make_async(indexing)(llm,pdf.path)
             msg.content = f"`{pdf.name}` processed."
             await msg.update()
-            msg = cl.Message(author="Beast",content=f"Generating Response...", disable_feedback=False)
+            msg = cl.Message(author="Beast",content=f"Generating Response...")
             await msg.send()
             response =await cl.make_async(qa)(sp,query_engine,message)
             print(response)
             msg.content = str(response)
             await msg.update()
 
+    elif not message.elements and engine!=None:
+        msg = cl.Message(author="Beast",content=f"Generating Response...")
+        await msg.send()
+        response =await cl.make_async(qa)(sp,engine,message)
+        print(response)
+        msg.content = str(response)
+        await msg.update()
+
+    elif not message.elements and engine==None:
+        msg = cl.Message(author="Beast",content=f"Generating Response...")
+        await msg.send()
+        payload = {
+            "input_variables": {"system": sp,
+                                "prompt":message.content},
+            "stream": False,
+            "temperature": 0.6,
+            "max_tokens": 512
+        }
+        output = service_client.generate(model = "deploy-llm", data = payload)
+        msg.content = str(output['text'][0])
+        await msg.update()
+
+    else:
+        cl.Message(author="Beast",content="Broken ;(")
+
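Note on the change itself: the startup call that eagerly indexed a default data.txt is gone, so a session may now have no query engine when a plain message arrives. In that case main() sends the prompt straight to the MonsterAPI deployment. A minimal standalone sketch of that fallback path, with placeholder endpoint and token values (app.py collects the real ones via AskUserMessage at chat start and keeps the client in the session):

# Standalone sketch of the new "response without index" path.
# base_url and api_key below are placeholders, not values from this commit.
from monsterapi import client as mclient

service_client = mclient(api_key="<auth-token>",
                         base_url="https://<your-deployment>.monsterapi.ai")

payload = {
    "input_variables": {"system": "",                   # optional system prompt
                        "prompt": "What can you do?"},  # user message
    "stream": False,
    "temperature": 0.6,
    "max_tokens": 512
}

# Same call shape as in main(): generate() against the deployed model,
# then read the first completion from output['text'].
output = service_client.generate(model="deploy-llm", data=payload)
print(output["text"][0])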
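The body of qa() falls outside the changed hunks, so it is not shown above. For orientation only, a plausible minimal version, assuming it simply combines the system prompt with the user's text and queries the llama_index engine built by indexing() (a guess, not the file's actual code):

# Hypothetical stand-in for qa(); the real implementation is not part of this diff.
def qa(sp, engine, message):
    # Prepend the optional system prompt to the user's message and run it
    # through the llama_index query engine stored in the Chainlit session.
    return engine.query(sp + "\n" + message.content)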