camparchimedes committed on
Commit
8a0e8f1
·
verified ·
1 Parent(s): d3dd419

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -35
app.py CHANGED
@@ -13,22 +13,6 @@ from langchain.prompts.chat import (
13
  )
14
  import chainlit as cl
15
 
16
- # ---------------------------------------------------for backend looks, example file:----------------------------------
17
- import os
18
-
19
- with open('/home/user/.local/lib/python3.10/site-packages/chainlit/config.py', 'r') as file: # line 622
20
- content = file.read()
21
- print("config.py:", content)
22
-
23
- # /home/user/.local/bin/chainlit
24
- # /home/user/app/.chainlit/config.toml
25
-
26
-
27
- #app_path = os.path.abspath(__file__)
28
- #print("[email protected]:", app_path)
29
-
30
- # ------------------------------------------------------the end--------------------------------------------------------
31
-
32
 
33
  text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
34
 
@@ -71,26 +55,19 @@ async def init():
71
  msg = cl.Message(content=f"Building Index...")
72
  await msg.send()
73
 
74
- # --builds FAISS index from csv
75
  loader = CSVLoader(file_path="./data/total_faq.csv", source_column="Answer")
76
  data = loader.load()
77
 
78
- # --adding spec. metadata-------------------------------------------------------------------------------------------------
79
  for i, doc in enumerate(data):
80
- doc.metadata["row_index"] = i + 1 # --row index (1-based)
81
  doc.metadata["source"] = doc.metadata.get("Info_Url", "")
82
- # ------------------------------------------------------------------------------------------------------------------------
83
 
84
- # --pull some q's & dotted i's for menu ==================================================================================
85
- questions = [doc.page_content for doc in data[:5]]
86
- # ========================================================================================================================
87
  documents = text_splitter.transform_documents(data)
88
  store = LocalFileStore("./cache/")
89
  core_embeddings_model = OpenAIEmbeddings()
90
  embedder = CacheBackedEmbeddings.from_bytes_store(
91
  core_embeddings_model, store, namespace=core_embeddings_model.model
92
  )
93
- # --make async docsearch
94
  docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
95
 
96
  chain = RetrievalQA.from_chain_type(
@@ -101,19 +78,12 @@ async def init():
101
  chain_type_kwargs = {"prompt": prompt}
102
  )
103
 
104
- #menu_message = (
105
- #"Index built! Bare spør ivei..\n\n"
106
- #"Her er noen spørsmål vi ofte ser i forbindelse med DaysOff firmahytteordning:\n"
107
- #+ "\n".join([f"- {q}" for q in questions])
108
- #)
109
-
110
- #msg.content = menu_message
111
  msg.content = f"Index built! Bare spør ivei..🤓"
112
  await msg.send()
113
 
114
-
115
  cl.user_session.set("chain", chain)
116
 
 
117
  @cl.on_message
118
  async def main(message):
119
  chain = cl.user_session.get("chain")
@@ -129,12 +99,10 @@ async def main(message):
129
  source_elements = []
130
  visited_sources = set()
131
 
132
- # --documents, user session
133
  docs = res.get("source_documents", [])
134
  metadatas = [doc.metadata for doc in docs]
135
- #all_sources = [m["source"] for m in metadatas]
136
 
137
- # --append source(s), specific rows only
138
  for doc, metadata in zip(docs, metadatas):
139
  row_index = metadata.get("row_index", -1)
140
  source = metadata.get("source", "")
 
13
  )
14
  import chainlit as cl
15
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
16
 
17
  text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=100)
18
 
 
55
  msg = cl.Message(content=f"Building Index...")
56
  await msg.send()
57
 
 
58
  loader = CSVLoader(file_path="./data/total_faq.csv", source_column="Answer")
59
  data = loader.load()
60
 
 
61
  for i, doc in enumerate(data):
62
+ doc.metadata["row_index"] = i + 1
63
  doc.metadata["source"] = doc.metadata.get("Info_Url", "")
 
64
 
 
 
 
65
  documents = text_splitter.transform_documents(data)
66
  store = LocalFileStore("./cache/")
67
  core_embeddings_model = OpenAIEmbeddings()
68
  embedder = CacheBackedEmbeddings.from_bytes_store(
69
  core_embeddings_model, store, namespace=core_embeddings_model.model
70
  )
 
71
  docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
72
 
73
  chain = RetrievalQA.from_chain_type(
 
78
  chain_type_kwargs = {"prompt": prompt}
79
  )
80
 
 
 
 
 
 
 
 
81
  msg.content = f"Index built! Bare spør ivei..🤓"
82
  await msg.send()
83
 
 
84
  cl.user_session.set("chain", chain)
85
 
86
+
87
  @cl.on_message
88
  async def main(message):
89
  chain = cl.user_session.get("chain")
 
99
  source_elements = []
100
  visited_sources = set()
101
 
 
102
  docs = res.get("source_documents", [])
103
  metadatas = [doc.metadata for doc in docs]
104
+
105
 
 
106
  for doc, metadata in zip(docs, metadatas):
107
  row_index = metadata.get("row_index", -1)
108
  source = metadata.get("source", "")