Manel committed on
Commit
b9ca24f
·
verified ·
1 Parent(s): ee8853c

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -7
app.py CHANGED
@@ -225,7 +225,6 @@ if __name__=="__main__":
225
  """
226
  st.markdown(hide_st_style, unsafe_allow_html=True)
227
 
228
- # set logger
229
  logger = logging.getLogger(__name__)
230
  logging.basicConfig(
231
  filename="app.log",
@@ -235,16 +234,13 @@ if __name__=="__main__":
235
  datefmt="%Y-%m-%d %H:%M:%S",)
236
 
237
 
238
- # model to use in spaces depends on the available device
239
  device = "cuda" if torch.cuda.is_available() else "cpu"
240
-
241
  model_name = "llama" if device=="cpu" else "mistral"
242
-
243
  logger.info(f"Running {model_name} model for inference on {device}")
244
 
245
-
246
  all_templates = { "llama_prompt_template" : """<s>[INST]\n<<SYS>>\nYou are a stoic teacher that provide guidance and advice inspired by Stoic philosophy on navigating life's challenges with resilience and inner peace. Emphasize the importance of focusing on what is within one's control and accepting what is not. Encourage the cultivation of virtue, mindfulness, and self-awareness as tools for achieving eudaimonia. Advocate for enduring hardships with fortitude and maintaining emotional balance in all situations. Your response should reflect Stoic principles of living in accordance with nature and embracing the rational order of the universe.
247
- You should guide the reader towards a fulfilling life focused on virtue rather than external things because living in accordance with virtue leads to eudaimonia or flourishing.
 
248
  context:
249
  {context}\n<</SYS>>\n\n
250
  question:
@@ -279,7 +275,6 @@ if __name__=="__main__":
279
 
280
 
281
  db = load_db(device)
282
-
283
  model, tokenizer = load_model(model_name)
284
 
285
  # streamlit chat
 
225
  """
226
  st.markdown(hide_st_style, unsafe_allow_html=True)
227
 
 
228
  logger = logging.getLogger(__name__)
229
  logging.basicConfig(
230
  filename="app.log",
 
234
  datefmt="%Y-%m-%d %H:%M:%S",)
235
 
236
 
 
237
  device = "cuda" if torch.cuda.is_available() else "cpu"
 
238
  model_name = "llama" if device=="cpu" else "mistral"
 
239
  logger.info(f"Running {model_name} model for inference on {device}")
240
 
 
241
  all_templates = { "llama_prompt_template" : """<s>[INST]\n<<SYS>>\nYou are a stoic teacher that provide guidance and advice inspired by Stoic philosophy on navigating life's challenges with resilience and inner peace. Emphasize the importance of focusing on what is within one's control and accepting what is not. Encourage the cultivation of virtue, mindfulness, and self-awareness as tools for achieving eudaimonia. Advocate for enduring hardships with fortitude and maintaining emotional balance in all situations. Your response should reflect Stoic principles of living in accordance with nature and embracing the rational order of the universe.
242
+ You should guide the reader towards a fulfilling life focused on virtue rather than external things because living in accordance with virtue leads to eudaimonia or flourishing.\n\n
243
+ Give a precise answer to the question based on the context. Don't be verbose.\n\n
244
  context:
245
  {context}\n<</SYS>>\n\n
246
  question:
 
275
 
276
 
277
  db = load_db(device)
 
278
  model, tokenizer = load_model(model_name)
279
 
280
  # streamlit chat