dlflannery committed (verified)
Commit 3a2eaed · 1 Parent(s): 5511924

Update app.py

Fix: chat() was changing the gptModel state variable after an image-analysis chat.

Files changed (1): app.py (+8, -5)
app.py CHANGED

@@ -176,6 +176,7 @@ def updatePassword(txt):
     # return val
 
 def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_image_file=''):
+    image_gen_model = 'gpt-4o-2024-08-06'
     user_window = user_window.lower().strip()
     isBoss = False
     if user_window == unames[0] and pwd_window == pwdList[0]:
@@ -200,13 +201,15 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
         return [past, response, None, gptModel, uploaded_image_file]
     if user_window in unames and pwd_window == pwdList[unames.index(user_window)]:
         past.append({"role":"user", "content":prompt})
-        if uploaded_image_file == '':
+        gen_image = (uploaded_image_file != '')
+        if not gen_image:
             completion = client.chat.completions.create(model=gptModel,
                 messages=past)
+            reporting_model = gptModel
         else:
-            gptModel = 'gpt-4o-2024-08-06'
-            (completion, msg) = analyze_image(user_window, gptModel)
+            (completion, msg) = analyze_image(user_window, image_gen_model)
             uploaded_image_file= ''
+            reporting_model = image_gen_model
             if not msg == 'ok':
                 return [past, msg, None, gptModel, uploaded_image_file]
         reply = completion.choices[0].message.content
@@ -215,7 +218,7 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
         tokens = completion.usage.total_tokens
         response += "\n\nYOU: " + prompt + "\nGPT: " + reply
         if isBoss:
-            response += f"\n{gptModel}: tokens in/out = {tokens_in}/{tokens_out}"
+            response += f"\n{reporting_model}: tokens in/out = {tokens_in}/{tokens_out}"
         if tokens > 40000:
             response += "\n\nTHIS DIALOG IS GETTING TOO LONG. PLEASE RESTART CONVERSATION SOON."
         past.append({"role":"assistant", "content": reply})
@@ -225,7 +228,7 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
             dataFile = new_func(user_window)
             with open(dataFile, 'a') as f:
                 m = '4o'
-                if 'mini' in gptModel:
+                if 'mini' in reporting_model:
                     m = '4omini'
                 f.write(f'{user_window}:{tokens_in}/{tokens_out}-{m}\n')
             accessOk = True
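
For context, a minimal sketch of the pattern this commit adopts: the caller's model choice (gptModel, a Gradio state value returned from chat) is never overwritten, while a local reporting_model records which model actually handled the turn. The names chat_sketch, fake_text_completion, and fake_image_analysis are hypothetical stand-ins for the app's client.chat.completions.create() and analyze_image(); this is not the app's actual code.

    # Minimal sketch (hypothetical names), assuming stubbed stand-ins for the
    # OpenAI client and analyze_image(); illustrates keeping gptModel intact.
    IMAGE_MODEL = 'gpt-4o-2024-08-06'   # model pinned for image analysis

    def fake_text_completion(model, messages):
        # stand-in for client.chat.completions.create(...)
        return f"text reply from {model}", 'ok'

    def fake_image_analysis(model):
        # stand-in for analyze_image(...)
        return f"image reply from {model}", 'ok'

    def chat_sketch(prompt, gptModel, uploaded_image_file=''):
        """Return (reply, gptModel); the state variable comes back unchanged."""
        if uploaded_image_file == '':
            reply, msg = fake_text_completion(gptModel, [{"role": "user", "content": prompt}])
            reporting_model = gptModel
        else:
            reply, msg = fake_image_analysis(IMAGE_MODEL)
            reporting_model = IMAGE_MODEL   # report the model that actually ran...
        if msg != 'ok':
            return msg, gptModel
        # ...but never overwrite gptModel, so later text turns keep the user's model
        return f"{reporting_model}: {reply}", gptModel

    # Before this commit, the image branch did `gptModel = 'gpt-4o-2024-08-06'`,
    # so every chat after an image analysis silently switched models.
    print(chat_sketch("hello", "gpt-4o-mini"))
    print(chat_sketch("what is in this image?", "gpt-4o-mini", uploaded_image_file="cat.png"))
    print(chat_sketch("hello again", "gpt-4o-mini"))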