ColeGuion committed on
Commit
7e14f2f
·
verified ·
1 Parent(s): 9fb1e44

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -4
app.py CHANGED
@@ -10,7 +10,8 @@ def format_prompt_basic(message, history):
10
  prompt = "<s>"
11
 
12
  # String to add before every prompt
13
- prompt_prefix = "" #"Please correct the grammar in the following sentence:"
 
14
  prompt_template = "[INST] " + prompt_prefix + " {} [/INST]"
15
 
16
 
@@ -25,7 +26,7 @@ def format_prompt_basic(message, history):
25
  return prompt
26
 
27
  # Formats the prompt to hold all of the past messages
28
- def format_prompt(message, history):
29
  prompt = "<s>"
30
 
31
  # String to add before every prompt
@@ -58,8 +59,9 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256
58
 
59
  generate_kwargs = dict(temperature=temperature, max_new_tokens=max_new_tokens, top_p=top_p, repetition_penalty=repetition_penalty, do_sample=True, seed=42,)
60
 
61
- formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
62
- stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=False, return_full_text=False)
 
63
  output = ""
64
 
65
  for response in stream:
 
10
  prompt = "<s>"
11
 
12
  # String to add before every prompt
13
+ prompt_prefix = ""
14
+ #prompt_prefix = "Please correct the grammar in the following sentence:"
15
  prompt_template = "[INST] " + prompt_prefix + " {} [/INST]"
16
 
17
 
 
26
  return prompt
27
 
28
  # Formats the prompt to hold all of the past messages
29
+ def format_prompt_grammar(message, history):
30
  prompt = "<s>"
31
 
32
  # String to add before every prompt
 
59
 
60
  generate_kwargs = dict(temperature=temperature, max_new_tokens=max_new_tokens, top_p=top_p, repetition_penalty=repetition_penalty, do_sample=True, seed=42,)
61
 
62
+ #formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
63
+ formatted_prompt = format_prompt_grammar(f"{system_prompt}, {prompt}", history)
64
+ stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
65
  output = ""
66
 
67
  for response in stream: