camparchimedes committed on
Commit
dfe4dc9
·
verified ·
1 Parent(s): cbaa964

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +8 -6
app.py CHANGED
@@ -18,7 +18,7 @@ text_splitter = RecursiveCharacterTextSplitter(chunk_size=1000, chunk_overlap=10
18
 
19
  system_template = """
20
  Use the following pieces of context to answer the user's question.
21
- Please respond as if you were Ken from the movie Barbie. Ken is a well-meaning but naive character who loves to Beach. He talks like a typical Californian Beach Bro, but he doesn't use the word "Dude" so much.
22
  If you don't know the answer, just say that you don't know, don't try to make up an answer.
23
  You can make inferences based on the context as long as it still faithfully represents the feedback.
24
 
@@ -41,7 +41,7 @@ chain_type_kwargs = {"prompt": prompt}
41
 
42
  @cl.author_rename
43
  def rename(orig_author: str):
44
- rename_dict = {"RetrievalQA": "Consulting The Kens"}
45
  return rename_dict.get(orig_author, orig_author)
46
 
47
  @cl.on_chat_start
@@ -49,7 +49,7 @@ async def init():
49
  msg = cl.Message(content=f"Building Index...")
50
  await msg.send()
51
 
52
- # build FAISS index from csv
53
  loader = CSVLoader(file_path="./data/barbie.csv", source_column="Review_Url")
54
  data = loader.load()
55
  documents = text_splitter.transform_documents(data)
@@ -58,11 +58,11 @@ async def init():
58
  embedder = CacheBackedEmbeddings.from_bytes_store(
59
  core_embeddings_model, store, namespace=core_embeddings_model.model
60
  )
61
- # make async docsearch
62
  docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
63
 
64
  chain = RetrievalQA.from_chain_type(
65
- ChatOpenAI(model="gpt-3.5-turbo", temperature=0, streaming=True),
66
  chain_type="stuff",
67
  return_source_documents=True,
68
  retriever=docsearch.as_retriever(),
@@ -97,7 +97,7 @@ async def main(message):
97
  if source in visited_sources:
98
  continue
99
  visited_sources.add(source)
100
- # Create the text element referenced in the message
101
  source_elements.append(
102
  cl.Text(content="https://www.imdb.com" + source, name="Review URL")
103
  )
@@ -106,5 +106,7 @@ async def main(message):
106
  answer += f"\nSources: {', '.join([e.content.decode('utf-8') for e in source_elements])}"
107
  else:
108
  answer += "\nNo sources found"
 
109
 
110
  await cl.Message(content=answer, elements=source_elements).send()
 
 
18
 
19
  system_template = """
20
  Use the following pieces of context to answer the user's question.
21
+ Please respond as if you were Pris from the movie ’Bladerunner’.
22
  If you don't know the answer, just say that you don't know, don't try to make up an answer.
23
  You can make inferences based on the context as long as it still faithfully represents the feedback.
24
 
 
41
 
42
  @cl.author_rename
43
  def rename(orig_author: str):
44
+ rename_dict = {"RetrievalQA": "Consulting.....?"}
45
  return rename_dict.get(orig_author, orig_author)
46
 
47
  @cl.on_chat_start
 
49
  msg = cl.Message(content=f"Building Index...")
50
  await msg.send()
51
 
52
+ # --build FAISS index from csv
53
  loader = CSVLoader(file_path="./data/barbie.csv", source_column="Review_Url")
54
  data = loader.load()
55
  documents = text_splitter.transform_documents(data)
 
58
  embedder = CacheBackedEmbeddings.from_bytes_store(
59
  core_embeddings_model, store, namespace=core_embeddings_model.model
60
  )
61
+ # --make async docsearch
62
  docsearch = await cl.make_async(FAISS.from_documents)(documents, embedder)
63
 
64
  chain = RetrievalQA.from_chain_type(
65
+ ChatOpenAI(model="gpt-3.5-turbo-instruct", temperature=0.9, streaming=True),
66
  chain_type="stuff",
67
  return_source_documents=True,
68
  retriever=docsearch.as_retriever(),
 
97
  if source in visited_sources:
98
  continue
99
  visited_sources.add(source)
100
+ # --create text element referenced in message
101
  source_elements.append(
102
  cl.Text(content="https://www.imdb.com" + source, name="Review URL")
103
  )
 
106
  answer += f"\nSources: {', '.join([e.content.decode('utf-8') for e in source_elements])}"
107
  else:
108
  answer += "\nNo sources found"
109
+
110
 
111
  await cl.Message(content=answer, elements=source_elements).send()
112
+ return