Mathias Lux committed on
Commit f5fb9e3 · 1 Parent(s): 14e9219

granite didn't work as intended .. switch back to llama

Files changed (1)
  1. app.py +2 -2
app.py CHANGED
@@ -4,8 +4,8 @@ from huggingface_hub import InferenceClient
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("ibm-granite/granite-3.1-2b-instruct") # specifically for long contexts.
-# client = InferenceClient("meta-llama/Llama-3.2-3B-Instruct") # too small context window
+# client = InferenceClient("ibm-granite/granite-3.1-2b-instruct") # specifically for long contexts.
+client = InferenceClient("meta-llama/Llama-3.2-3B-Instruct") # too small context window
 # client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
 
 _sys_msg = """