Colby committed on
Commit 714170e · verified · 1 Parent(s): 3661457

Update app.py

Files changed (1)
  1. app.py +9 -10
app.py CHANGED
@@ -95,11 +95,11 @@ headers = {"Authorization": "Bearer " + os.environ['HF_TOKEN']}
 
 def merlin_chat(message, history):
     chat_text = ""
-    chat_json = ""
+    #chat_json = ""
     for turn in history:
-        chat_text += f"USER: {turn[0]}\n\nASSISTANT: {turn[1]}\n\n"
-        chat_json += json.dumps({"role": "user", "content": turn[0]})
-        chat_json += json.dumps({"role": "assistant", "content": turn[1]})
+        chat_text += f"{turn[0]}\n\n{turn[1]}\n\n"
+        #chat_json += json.dumps({"role": "user", "content": turn[0]})
+        #chat_json += json.dumps({"role": "assistant", "content": turn[1]})
     chat_text += f"USER: {message}\n"
     doc = nlp(chat_text)
     ents_found = []
@@ -135,24 +135,23 @@ def merlin_chat(message, history):
             continue
         else:
             context += entsum + '\n\n'
-    context
     system_msg = {
         'role': 'system', 'content': context
     }
     user_msg = {'role': 'user', 'content': message}
-    prompt = "[" + json.dumps(system_msg) + chat_json + json.dumps(user_msg) + "{'role': 'assistant, 'content': '"
+    prompt_data = history
+    prompt_data.append(user_msg)
+    prompt_data.insert(0,system_msg)
+    prompt = json.dumps(system_msg)[:-1] + ",{'role': 'assistant', 'content': '"
     for attempt in range(3):
         # result = model(prompt, max_new_tokens=250, return_full_text=False, handle_long_generation="hole")
         result = generate_text(prompt, model_path, parameters, headers)
         response = result[0]
+        print(response) # so we can see it in logs
         start = 0
         end = 0
         cleanStr = response.lstrip()
         # cleanStr = cleanStr.replace(prompt,"")
-        start = cleanStr.find('{') # this should skip over whatever it recalls to what it says next
-        if start<=0:
-            continue
-        cleanStr = cleanStr[start:]
         end = cleanStr.find('}') + 1
         if end<=0:
             continue
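
For reference, a minimal standalone sketch of the prompt string the updated merlin_chat builds, reconstructed from the diff above. The context and message values are made-up placeholders, the empty list stands in for the Gradio history argument, and generate_text is not called here:

import json

# Placeholder inputs; in app.py these come from the entity summaries and the
# incoming chat message.
context = "Summaries of entities mentioned so far."
message = "Tell me about Merlin."

system_msg = {'role': 'system', 'content': context}
user_msg = {'role': 'user', 'content': message}

# Stand-in for the Gradio `history` argument.
prompt_data = []
prompt_data.append(user_msg)
prompt_data.insert(0, system_msg)

# json.dumps(system_msg)[:-1] drops the closing brace of the serialized system
# message, then an unterminated assistant turn is appended for the model to complete.
prompt = json.dumps(system_msg)[:-1] + ",{'role': 'assistant', 'content': '"
print(prompt)
# {"role": "system", "content": "Summaries of entities mentioned so far.",{'role': 'assistant', 'content': '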