ColeGuion committed (verified)
Commit 2915268 · 1 parent: bdc235c

Update app.py

Files changed (1): app.py (+27 -3)
app.py CHANGED
@@ -6,7 +6,7 @@ client = InferenceClient(
 )
 
 # Formats the prompt to hold all of the past messages
-def format_prompt(message, history):
+def format_prompt1(message, history):
     prompt = "<s>"
 
     # String to add before every prompt
@@ -27,6 +27,30 @@ def format_prompt(message, history):
 
     return prompt
 
+def format_prompt(message, history):
+    prompt = "<s>"
+
+    # String to add before every prompt
+    #prompt_prefix = "Please correct the grammar in the following sentence:"
+    #prompt_template = "[INST] " + prompt_prefix + " {} [/INST]"
+    prompt_prefix = "Correct any grammatical errors in the following sentence and provide the corrected version:\n\nSentence: "
+    prompt_template = "[INST] " + prompt_prefix + ' "{}" [/INST] Corrected Sentence:'
+
+    print("History Type: {}".format(type(history)))
+    if type(history) != type(list()):
+        print("\nOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO\nOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO\nOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOOO\n")
+        #history.append("It is my friends house in England.", "It is my friend's house in England.")
+        #history.append("Every girl must bring their books to school.", "Every girl must bring her books to school.")
+
+    # Iterates through every past user input and response to be added to the prompt
+    for user_prompt, bot_response in history:
+        prompt += prompt_template.format(user_prompt)
+        prompt += f" {bot_response}</s> "
+
+    prompt += prompt_template.format(message)
+    print("PROMPT: \n\t{}\n".format(prompt))
+    return prompt
+
 def format_my_prompt(user_input):
     # Formatting the prompt as per the new template
     prompt = f"<s> [INST] Please correct the grammatical errors in the following sentence: {user_input} [/INST] Model answer</s> [INST] Return only the grammatically corrected sentence. [/INST]"
@@ -42,8 +66,8 @@ def generate(prompt, history, system_prompt, temperature=0.9, max_new_tokens=256
 
     generate_kwargs = dict(temperature=temperature, max_new_tokens=max_new_tokens, top_p=top_p, repetition_penalty=repetition_penalty, do_sample=True, seed=42,)
 
-    #formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
-    formatted_prompt = format_my_prompt(prompt)
+    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
+    #formatted_prompt = format_my_prompt(prompt)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
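
For context, a minimal standalone sketch of the prompt string the new format_prompt builds (it mirrors the added function rather than importing app.py; the sample history and message below reuse the example sentences from the commented-out lines in the diff and are illustrative only):

# Standalone reproduction of the new prompt template from this commit (illustrative sketch).
prompt_prefix = "Correct any grammatical errors in the following sentence and provide the corrected version:\n\nSentence: "
prompt_template = "[INST] " + prompt_prefix + ' "{}" [/INST] Corrected Sentence:'

# Hypothetical one-turn history and new message (taken from the diff's commented-out examples).
history = [("It is my friends house in England.", "It is my friend's house in England.")]
message = "Every girl must bring their books to school."

prompt = "<s>"
for user_prompt, bot_response in history:
    prompt += prompt_template.format(user_prompt)
    prompt += f" {bot_response}</s> "
prompt += prompt_template.format(message)
print(prompt)
# Prints roughly:
# <s>[INST] Correct any grammatical errors ... Sentence:  "It is my friends house in England." [/INST] Corrected Sentence: It is my friend's house in England.</s> [INST] ... "Every girl must bring their books to school." [/INST] Corrected Sentence: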