vericudebuget committed on
Commit
975f649
·
verified ·
1 Parent(s): b5cdba7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -7
app.py CHANGED
@@ -15,11 +15,6 @@ def format_prompt(message, history):
15
  return prompt
16
 
17
  def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=9048, top_p=0.95, repetition_penalty=1.0):
18
- # Load history from local storage
19
- if 'chat_history' in localStorage:
20
- history = JSON.parse(localStorage.getItem('chat_history'))
21
- else:
22
- history = []
23
 
24
  temperature = max(float(temperature), 1e-2)
25
  top_p = float(top_p)
@@ -37,8 +32,6 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=904
37
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
38
  output = ""
39
 
40
- localStorage.setItem('chat_history', JSON.stringify(history))
41
-
42
  for response in stream:
43
  output += response.token.text
44
  yield output
 
15
  return prompt
16
 
17
  def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=9048, top_p=0.95, repetition_penalty=1.0):
 
 
 
 
 
18
 
19
  temperature = max(float(temperature), 1e-2)
20
  top_p = float(top_p)
 
32
  stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
33
  output = ""
34
 
 
 
35
  for response in stream:
36
  output += response.token.text
37
  yield output