rohan13 committed
Commit da174c1 · 1 Parent(s): c15f856

added condition

Files changed (2)
  1. main.py +2 -1
  2. utils.py +1 -1
main.py CHANGED
@@ -1,4 +1,5 @@
 from utils import create_index, get_agent_chain, get_prompt_and_tools, get_search_index
+question_starters = ['who', 'why', 'what', 'how', 'where', 'when', 'which', 'whom', 'whose']
 
 
 def index():
@@ -17,7 +18,7 @@ def run(question):
     try:
         result = agent_chain.run(question)
     except ValueError as ve:
-        if "Could not parse LLM output:" in ve.args[0] and question.lower().startswith(tuple(question_starters)):
+        if "Could not parse LLM output:" in ve.args[0] and question.lower().startswith(tuple(question_starters)) and not question.lower().endswith('?'):
             question = question + '?'
             result = agent_chain.run(question)
 
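For review context, a minimal sketch of how the changed retry logic in run() reads once both hunks are applied. The condition mirrors the diff exactly; taking agent_chain as a parameter, the else: raise branch, and the final return are assumptions added only to make the sketch self-contained, and are not part of this commit.

question_starters = ['who', 'why', 'what', 'how', 'where', 'when', 'which', 'whom', 'whose']


def run(question, agent_chain):
    # agent_chain is assumed to come from get_agent_chain() elsewhere in main.py.
    try:
        result = agent_chain.run(question)
    except ValueError as ve:
        # Retry once with a trailing '?' only when the agent could not parse the
        # LLM output, the input starts like a question, and it is not already
        # punctuated as one.
        if "Could not parse LLM output:" in ve.args[0] \
                and question.lower().startswith(tuple(question_starters)) \
                and not question.lower().endswith('?'):
            question = question + '?'
            result = agent_chain.run(question)
        else:
            raise  # assumption: unrelated ValueErrors keep propagating
    return result  # assumption: the caller consumes the agent's answer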
utils.py CHANGED
@@ -16,7 +16,7 @@ from langchain.embeddings import OpenAIEmbeddings
 pickle_file = "open_ai.pkl"
 index_file = "open_ai.index"
 
-gpt_3_5 = OpenAI(model_name='text-davinci-002',temperature=0)
+gpt_3_5 = OpenAI(model_name='gpt-3.5-turbo',temperature=0)
 
 embeddings = OpenAIEmbeddings()
 
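A hedged side note on the model swap, separate from the commit itself: 'gpt-3.5-turbo' is a chat-completions model, and LangChain releases from this period usually pair it with the ChatOpenAI wrapper, while the completions-style OpenAI class targets models such as text-davinci-003. A minimal sketch of that alternative wiring, assuming the same pre-0.1 langchain import layout used in utils.py:

from langchain.chat_models import ChatOpenAI

# Chat-model wrapper for gpt-3.5-turbo; temperature=0 keeps responses deterministic.
gpt_3_5 = ChatOpenAI(model_name='gpt-3.5-turbo', temperature=0)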