Omnibus committed on
Commit
bbbbe65
·
verified ·
1 Parent(s): 2bfe489

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +3 -8
app.py CHANGED
@@ -17,14 +17,14 @@ InferenceClient(models[2]),
17
  InferenceClient(models[3]),
18
  ]
19
 
20
- def compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p):
21
  client=clients[int(client_choice)-1]
22
  COMPRESS_HISTORY="""You are an Information Summarizer Agent. Your duty is to summarize the following information into a more concise format with far less words.
23
  Retain all the main points and provide a brief and concise summary of the conversation.
24
  Converstion:
25
  {history}"""
26
  print("COMPRESSING")
27
- formatted_prompt=f"{COMPRESS_HISTORY.format(history=history)}"
28
  generate_kwargs = dict(
29
  temperature=temp,
30
  max_new_tokens=1024,
@@ -72,7 +72,7 @@ def chat_inf(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,r
72
  print("\n######### TOKENS "+str(tokens))
73
  if (in_len+tokens) > 8000:
74
  yield [(prompt,"Wait. I need to compress our Chat history...")]
75
- hist=compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p)
76
  yield [(prompt,"History has been compressed, processing request...")]
77
  history = [(prompt,hist)]
78
  generate_kwargs = dict(
@@ -86,13 +86,8 @@ def chat_inf(system_prompt,prompt,history,client_choice,seed,temp,tokens,top_p,r
86
  #formatted_prompt=prompt
87
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history[0-chat_mem:])
88
  print("\n######### PROMPT "+str(len(formatted_prompt)))
89
-
90
-
91
-
92
-
93
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
94
  output = ""
95
-
96
  for response in stream:
97
  output += response.token.text
98
  yield [(prompt,output)]
 
17
  InferenceClient(models[3]),
18
  ]
19
 
20
+ def compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem):
21
  client=clients[int(client_choice)-1]
22
  COMPRESS_HISTORY="""You are an Information Summarizer Agent. Your duty is to summarize the following information into a more concise format with far less words.
23
  Retain all the main points and provide a brief and concise summary of the conversation.
24
  Converstion:
25
  {history}"""
26
  print("COMPRESSING")
27
+ formatted_prompt=f"{COMPRESS_HISTORY.format(history=history[0-chat_mem:])}"
28
  generate_kwargs = dict(
29
  temperature=temp,
30
  max_new_tokens=1024,
 
72
  print("\n######### TOKENS "+str(tokens))
73
  if (in_len+tokens) > 8000:
74
  yield [(prompt,"Wait. I need to compress our Chat history...")]
75
+ hist=compress_history(history,client_choice,seed,temp,tokens,top_p,rep_p,chat_mem)
76
  yield [(prompt,"History has been compressed, processing request...")]
77
  history = [(prompt,hist)]
78
  generate_kwargs = dict(
 
86
  #formatted_prompt=prompt
87
  formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history[0-chat_mem:])
88
  print("\n######### PROMPT "+str(len(formatted_prompt)))
 
 
 
 
89
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
90
  output = ""
 
91
  for response in stream:
92
  output += response.token.text
93
  yield [(prompt,output)]