dlflannery committed on
Commit
2f30f18
·
verified ·
1 Parent(s): 4c09467

Update app.py

Browse files

Added a "Do not use LaTeX for math expressions" instruction to the o1-mini prompt.

Files changed (1) hide show
  1. app.py +3 -1
app.py CHANGED
@@ -683,7 +683,8 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
683
  elif prompt.lower().startswith('o1m '):
684
  reasoning = True
685
  gptModel = 'o1-mini'
686
- prompt = prompt[4:] + '. Provide a detailed step-by-step description of your reasoning.'
 
687
  elif prompt.lower().startswith('solve'):
688
  prompt = 'How do I solve ' + prompt[5:] + ' Do not use Latex for math expressions.'
689
  chatType = 'math'
@@ -694,6 +695,7 @@ def chat(prompt, user_window, pwd_window, past, response, gptModel, uploaded_ima
694
  gen_image = (uploaded_image_file != '')
695
  if chatType in special_chat_types:
696
  (reply, tokens_in, tokens_out, tokens) = solve(prompt, chatType)
 
697
  reporting_model = image_gen_model
698
  elif not gen_image:
699
  if deepseek:
 
683
  elif prompt.lower().startswith('o1m '):
684
  reasoning = True
685
  gptModel = 'o1-mini'
686
+ prompt = prompt[4:] + \
687
+ '. Provide a detailed step-by-step description of your reasoning. Do not use Latex for math expressions.'
688
  elif prompt.lower().startswith('solve'):
689
  prompt = 'How do I solve ' + prompt[5:] + ' Do not use Latex for math expressions.'
690
  chatType = 'math'
 
695
  gen_image = (uploaded_image_file != '')
696
  if chatType in special_chat_types:
697
  (reply, tokens_in, tokens_out, tokens) = solve(prompt, chatType)
698
+ final_text = reply
699
  reporting_model = image_gen_model
700
  elif not gen_image:
701
  if deepseek: