calmgoose committed on
Commit
1bad1df
·
1 Parent(s): e48d6fc

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +4 -4
app.py CHANGED
@@ -77,13 +77,13 @@ def load_prompt(book_name, author_name):
77
 
78
 
79
  @st.experimental_singleton(show_spinner=False)
80
- def load_chain(model: Literal["openai", "GPT-NeoXT-Chat-Base-20B"] ="openai"):
81
 
82
  # choose model
83
  if model=="openai":
84
  llm = OpenAI(temperature=0.2)
85
 
86
- if model=="GPT-NeoXT-Chat-Base-20B":
87
  # llm = HuggingFacePipeline.from_model_id(
88
  # model_id="togethercomputer/GPT-NeoXT-Chat-Base-20B",
89
  # task="text-generation",
@@ -105,7 +105,7 @@ def load_chain(model: Literal["openai", "GPT-NeoXT-Chat-Base-20B"] ="openai"):
105
  return_source_documents=True,
106
  )
107
 
108
- logging.debug(f"Loaded chain with {model}.")
109
 
110
  return chain
111
 
@@ -194,7 +194,7 @@ if ask:
194
 with st.spinner("Um... excuse me but... this can take about a minute for your first question because some stuff have to be downloaded 🥺👉🏻👈🏻"):
195
  try:
196
  answer, pages, extract = get_answer(question=user_input, model=choice)
197
- logging.debug(f"Answer successfully generated using {choice}.")
198
  except:
199
  if choice=="togethercomputer/GPT-NeoXT-Chat-Base-20B":
200
  st.write("The model probably timed out :(")
 
77
 
78
 
79
  @st.experimental_singleton(show_spinner=False)
80
+ def load_chain(model: Literal["openai", "togethercomputer/GPT-NeoXT-Chat-Base-20B"] ="openai"):
81
 
82
  # choose model
83
  if model=="openai":
84
  llm = OpenAI(temperature=0.2)
85
 
86
+ if model=="togethercomputer/GPT-NeoXT-Chat-Base-20B":
87
  # llm = HuggingFacePipeline.from_model_id(
88
  # model_id="togethercomputer/GPT-NeoXT-Chat-Base-20B",
89
  # task="text-generation",
 
105
  return_source_documents=True,
106
  )
107
 
108
+ logging.info(f"Loaded chain with {model}.")
109
 
110
  return chain
111
 
 
194
 with st.spinner("Um... excuse me but... this can take about a minute for your first question because some stuff have to be downloaded 🥺👉🏻👈🏻"):
195
  try:
196
  answer, pages, extract = get_answer(question=user_input, model=choice)
197
+ logging.info(f"Answer successfully generated using {choice}.")
198
  except:
199
  if choice=="togethercomputer/GPT-NeoXT-Chat-Base-20B":
200
  st.write("The model probably timed out :(")