on1onmangoes committed on
Commit
7a5ddde
·
verified ·
1 Parent(s): 4fd55c8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -19
app.py CHANGED
@@ -16,7 +16,6 @@ client = Client("on1onmangoes/CNIHUB101324v10", hf_token=HF_TOKEN)
16
  # Update the conversation history within the function.
17
  # Return the updated history along with any other required outputs.
18
 
19
- #@spaces.GPU()
20
  def stream_chat_with_rag(
21
  message: str,
22
  history: list,
@@ -33,30 +32,12 @@ def stream_chat_with_rag(
33
  print(f"Message: {message}")
34
  print(f"History: {history}")
35
 
36
- # OG CODE DELETE
37
- # # Add the knowledge Index or VectorStore, RERANKER,
38
- # knowledge_index = vectorstore
39
- # reranker = RERANKER
40
 
41
  # Build the conversation prompt including system prompt and history
42
  conversation = system_prompt + "\n\n" + "For Client:" + client_name
43
  for user_input, assistant_response in history:
44
  conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
45
  conversation += f"User: {message}\nAssistant:"
46
-
47
- # Optionally, if your `answer_with_rag` function or LLM supports context, you can include the conversation
48
- # Since you prefer not to modify `answer_with_rag`, we'll proceed with the message as is
49
- # OG CODE DELETE
50
- # # Call `answer_with_rag` to get the final answer
51
- # answer, relevant_docs = answer_with_rag(
52
- # question=message,
53
- # knowledge_index=knowledge_index,
54
- # reranker=reranker,
55
- # num_retrieved_docs=num_retrieved_docs,
56
- # num_docs_final=num_docs_final,
57
- # client_name=client_name,
58
- # )
59
-
60
 
61
  answer, relevant_docs = client.predict(question=question, api_name="/answer_with_rag")
62
  # debug 092624
 
16
  # Update the conversation history within the function.
17
  # Return the updated history along with any other required outputs.
18
 
 
19
  def stream_chat_with_rag(
20
  message: str,
21
  history: list,
 
32
  print(f"Message: {message}")
33
  print(f"History: {history}")
34
 
 
 
 
 
35
 
36
  # Build the conversation prompt including system prompt and history
37
  conversation = system_prompt + "\n\n" + "For Client:" + client_name
38
  for user_input, assistant_response in history:
39
  conversation += f"User: {user_input}\nAssistant: {assistant_response}\n"
40
  conversation += f"User: {message}\nAssistant:"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
41
 
42
  answer, relevant_docs = client.predict(question=question, api_name="/answer_with_rag")
43
  # debug 092624